worker.init_shared_params(params, param_sync_rule=platoon_sync_rule) for param in params: print param.get_value().shape variational_loss = 0.5 * T.sum(z_mean**2 + z_var - T.log(z_var) - 1.0) square_loss = config['square_loss_weight'] * 1.0 * T.mean(T.sqr(x - x_reconstructed)) loss = 0.0 loss += square_loss netDist = NetDist(x, x_reconstructed, config) if config['style_weight'] > 0.0: style_loss = config['style_weight'] * netDist.get_dist_style() else: style_loss = theano.shared(np.asarray(0.0).astype('float32')) if config['content_weight'] > 0.0: content_loss = config['content_weight'] * netDist.get_dist_content() else: content_loss = theano.shared(np.asarray(0.0).astype('float32')) loss += style_loss + content_loss loss += 1.0 * variational_loss
# NOTE(review): this chunk starts mid-statement-sequence; the leading print
# presumably sits inside a `for param in params:` loop that begins before
# this chunk -- confirm against the full file.
print param.get_value().shape

# KL divergence of the approximate posterior from a unit Gaussian,
# scaled by the configured VAE weight.
variational_loss = config['vae_weight'] * 0.5 * T.sum(z_mean**2 + z_var - T.log(z_var) - 1.0)
#smoothness_penalty = 0.001 * (total_denoising_variation_penalty(x_reconstructed.transpose(0,3,1,2)[:,0:1,:,:]) + total_denoising_variation_penalty(x_reconstructed.transpose(0,3,1,2)[:,1:2,:,:]) + total_denoising_variation_penalty(x_reconstructed.transpose(0,3,1,2)[:,2:3,:,:]))

# Pixel-space reconstruction error on normalized images (note: T.sum here,
# whereas another version of this chunk uses T.mean -- intentional?).
square_loss = config['square_loss_weight'] * 1.0 * T.sum(T.sqr(normalize(x) - normalize(x_reconstructed)))

# Accumulate the total objective.  l2_loss is defined outside this chunk
# (presumably a weight-decay term -- confirm).
loss = 0.0
loss += l2_loss
loss += square_loss

# Perceptual distances between input and reconstruction via a pretrained
# network wrapped by NetDist.
netDist = NetDist(x, x_reconstructed, config)
if config['style_weight'] > 0.0:
    # get_dist_style() returns the style distance plus two auxiliary outputs.
    style_loss, style_out_1, style_out_2 = netDist.get_dist_style()
    style_loss *= config['style_weight']
else:
    # Disabled: all three names alias one 0-d float32 zero placeholder.
    style_loss = style_out_1 = style_out_2 = theano.shared(np.asarray(0.0).astype('float32'))
if config['content_weight'] > 0.0:
    content_loss = config['content_weight'] * netDist.get_dist_content()
else:
    content_loss = theano.shared(np.asarray(0.0).astype('float32'))
#128 x 3 x 96 x 96
# Gradient of the perceptual losses w.r.t. the reconstruction, sliced to
# the first sample / first channel -- assumes x_reconstructed is NHWC
# (the transposes above suggest so) -- TODO confirm.
# NOTE(review): if both weights are 0.0 the summed loss is a constant
# shared variable and T.grad w.r.t. x_reconstructed would raise a
# disconnected-input error -- confirm this configuration never occurs.
imgGrad = T.grad(style_loss + content_loss, x_reconstructed)[0,:,:,0]
# KL divergence of the approximate posterior from a unit Gaussian,
# scaled by the configured VAE weight.
variational_loss = config['vae_weight'] * 0.5 * T.sum(z_mean**2 + z_var - T.log(z_var) - 1.0)

# Per-channel total-variation smoothness penalty on the reconstruction
# (transposed 0,3,1,2 -- presumably NHWC -> NCHW, then one channel per call).
# NOTE(review): the 0.0 multiplier disables this term yet the graph is
# still built -- consider removing it if it is to stay disabled.
smoothness_penalty = 0.0 * (total_denoising_variation_penalty(x_reconstructed.transpose(0,3,1,2)[:,0:1,:,:]) + total_denoising_variation_penalty(x_reconstructed.transpose(0,3,1,2)[:,1:2,:,:]) + total_denoising_variation_penalty(x_reconstructed.transpose(0,3,1,2)[:,2:3,:,:]))

# Pixel-space reconstruction error on normalized images (sum over all
# elements), weighted by config.
raw_square_loss = T.sum(T.sqr(normalize(x) - normalize(x_reconstructed)))
square_loss = raw_square_loss * config['square_loss_weight']

# Accumulate the total objective.  l2_loss is defined outside this chunk
# (presumably a weight-decay term -- confirm).
loss = 0.0
loss += l2_loss
loss += square_loss

# Perceptual distances between input and reconstruction via a pretrained
# network wrapped by NetDist.
netDist = NetDist(x, x_reconstructed, config)
if config['style_weight'] > 0.0:
    # get_dist_style() returns the style distance plus two auxiliary outputs.
    style_loss, style_out_1, style_out_2 = netDist.get_dist_style()
    style_loss *= config['style_weight']
else:
    # Disabled: all three names alias one 0-d float32 zero placeholder.
    style_loss = style_out_1 = style_out_2 = theano.shared(np.asarray(0.0).astype('float32'))
if config['content_weight'] > 0.0:
    # get_dist_content() returns a dict of per-layer distances plus a list
    # of extra variables, which are appended to the trainable params.
    content_loss_values, varLst = netDist.get_dist_content()
    content_loss = sum(content_loss_values.values()) * config['content_weight']
    params += varLst
else:
    content_loss = theano.shared(np.asarray(0.0).astype('float32'))
    # NOTE(review): the content distances are still computed here even
    # though the content loss is disabled, and unlike the enabled branch
    # varLst is NOT appended to params -- presumably this only keeps
    # content_loss_values/varLst defined for downstream code; confirm,
    # otherwise guard or remove this call.
    content_loss_values, varLst = netDist.get_dist_content()