Example #1
0
    def __l1Loss(self):
        """Sum of per-map L1 distances between self.outputs and self.targets.

        Both tensors are expected to carry 12 channels laid out as
        [0:3] normal, [3:6] diffuse, [6:9] roughness, [9:12] specular
        (shape [?, height, width, 12]).  Diffuse and specular maps are
        compared in log space after deprocessing; epsilonL1 guards log(0).
        """
        def logSpace(tensor, begin, end):
            # Log-space comparison dampens the influence of very bright albedo values.
            return tf.log(epsilonL1 + helpers.deprocess(tensor[:, :, :, begin:end]))

        loss = l1(self.outputs[:, :, :, 0:3], self.targets[:, :, :, 0:3])
        loss += l1(logSpace(self.outputs, 3, 6), logSpace(self.targets, 3, 6))
        loss += l1(self.outputs[:, :, :, 6:9], self.targets[:, :, :, 6:9])
        loss += l1(logSpace(self.outputs, 9, 12), logSpace(self.targets, 9, 12))
        return loss
Example #2
0
def deprocess_images_fullTest(inputs, targets, nbTargets):
    """Build side-by-side display tensors for the full test pass.

    Deprocesses the ground-truth maps, reshapes them into one horizontal
    strip, and appends that strip to the right of each of the three input
    arrangements produced by concatSplitInputs.

    Args:
        inputs: input render tensor consumed by concatSplitInputs.
        targets: ground-truth SVBRDF maps, still in network output range.
        nbTargets: unused here; kept for signature compatibility with callers.

    Returns:
        Tuple of three display-ready (converted) tensors:
        (tensorOneInput, SurfaceLightFixedView, HemishpereLightFixedView).
    """
    targets = helpers.deprocess(targets)
    with tf.name_scope("transform_images"):
        # Removed unused local `targetShape` (was never read).
        targets_reshaped = concat_tensor_display(targets,
                                                 axisToConcat=2,
                                                 axisToSplit=1)
        tensorOneInput, SurfaceLightFixedView, HemishpereLightFixedView = concatSplitInputs(
            inputs, axisToConcat=2,
            axisToSplit=1)  #HemishpereLightHemisphereView

        # Append the ground-truth strip to the right (width axis) of each view.
        tensorOneInput = tf.concat([tensorOneInput, targets_reshaped], axis=2)
        SurfaceLightFixedView = tf.concat(
            [SurfaceLightFixedView, targets_reshaped], axis=2)
        HemishpereLightFixedView = tf.concat(
            [HemishpereLightFixedView, targets_reshaped], axis=2)
        #HemishpereLightHemisphereView  = tf.concat([HemishpereLightHemisphereView, targets_reshaped], axis = 2)

    with tf.name_scope("convert_images"):
        tensorOneInput = helpers.convert(tensorOneInput)
        SurfaceLightFixedView = helpers.convert(SurfaceLightFixedView)
        HemishpereLightFixedView = helpers.convert(HemishpereLightFixedView)
        #HemishpereLightHemisphereView = helpers.convert(HemishpereLightHemisphereView)

    return tensorOneInput, SurfaceLightFixedView, HemishpereLightFixedView  #, HemishpereLightHemisphereView
Example #3
0
    def tf_calculateBRDF(self, svbrdf, wiNorm, woNorm, currentConeTargetPos,
                         currentLightPos, multiLight):
        """Evaluate the microfacet BRDF from an SVBRDF tensor.

        svbrdf channel layout: [0:3] normal, [3:6] diffuse,
        [6:9] roughness, [9:12] specular.  All parameter maps are mapped
        to [0, 1] except the normals, which express a direction and are
        kept in [-1, 1] (then re-normalized).

        Returns (result, NdotL) where result is the rendered radiance
        (specular term, plus diffuse if self.includeDiffuse) and NdotL is
        the cosine between the surface normal and the light direction.
        """
        halfVector = helpers.tf_Normalize(tf.add(wiNorm, woNorm) / 2.0)

        def paramMap(begin, end):
            # Deprocess a 3-channel slice to [0, 1] and add the light axis.
            squeezed = helpers.squeezeValues(
                helpers.deprocess(svbrdf[:, :, :, begin:end]), 0.0, 1.0)
            return tf.expand_dims(squeezed, axis=1)

        normals = helpers.tf_Normalize(
            tf.expand_dims(svbrdf[:, :, :, 0:3], axis=1))
        diffuse = paramMap(3, 6)
        specular = paramMap(9, 12)
        # Keep roughness strictly positive to avoid division by 0 in D/G.
        roughness = tf.maximum(paramMap(6, 9), 0.001)

        if multiLight:
            # Extra leading axis so several lights can be rendered at once.
            diffuse, normals, specular, roughness = (
                tf.expand_dims(t, axis=1)
                for t in (diffuse, normals, specular, roughness))

        NdotH = helpers.tf_DotProduct(normals, halfVector)
        NdotL = helpers.tf_DotProduct(normals, wiNorm)
        NdotV = helpers.tf_DotProduct(normals, woNorm)
        VdotH = helpers.tf_DotProduct(woNorm, halfVector)

        diffuse_rendered = self.tf_diffuse(diffuse, specular)
        D_rendered = self.tf_D(roughness, tf.maximum(0.0, NdotH))
        G_rendered = self.tf_G(roughness, tf.maximum(0.0, NdotL),
                               tf.maximum(0.0, NdotV))
        F_rendered = self.tf_F(specular, tf.maximum(0.0, VdotH))

        result = F_rendered * (G_rendered * D_rendered * 0.25)
        # Add the diffuse part of the rendering if required.
        if self.includeDiffuse:
            result = result + diffuse_rendered
        return result, NdotL
Example #4
0
    def tf_calculateBRDF(self, svbrdf, wiNorm, woNorm, currentConeTargetPos,
                         currentLightPos, multiLight, lossRendering):
        """Evaluate the microfacet BRDF from an SVBRDF tensor.

        svbrdf channel layout: [0:3] normal, [3:6] diffuse,
        [6:9] roughness, [9:12] specular.  When lossRendering is False a
        fraction of the specular map is folded into the diffuse map to
        simulate ambient lighting (used mostly for fine tuning).

        Returns (result, NdotL): the rendered radiance (specular term,
        plus diffuse if self.includeDiffuse) and the normal/light cosine.
        """
        half = helpers.tf_Normalize(tf.add(wiNorm, woNorm) / 2.0)

        def sliceMap(first, last):
            # Deprocess a 3-channel SVBRDF slice to [0, 1], add light axis.
            return tf.expand_dims(
                helpers.squeezeValues(
                    helpers.deprocess(svbrdf[:, :, :, first:last]), 0.0, 1.0),
                axis=1)

        normals = helpers.tf_Normalize(
            tf.expand_dims(svbrdf[:, :, :, 0:3], axis=1))
        diffuse = sliceMap(3, 6)
        specular = sliceMap(9, 12)
        roughness = tf.maximum(sliceMap(6, 9), 0.001)  # avoid division by 0

        # Ambient-lighting simulation, mostly for fine tuning.
        if not lossRendering:
            diffuse = diffuse + (0.15 * specular)

        if multiLight:
            diffuse = tf.expand_dims(diffuse, axis=1)
            normals = tf.expand_dims(normals, axis=1)
            specular = tf.expand_dims(specular, axis=1)
            roughness = tf.expand_dims(roughness, axis=1)

        clamp = lambda t: tf.maximum(0.0, t)
        NdotH = helpers.tf_DotProduct(normals, half)
        NdotL = helpers.tf_DotProduct(normals, wiNorm)
        NdotV = helpers.tf_DotProduct(normals, woNorm)
        VdotH = helpers.tf_DotProduct(woNorm, half)

        diffuse_rendered = self.tf_diffuse(diffuse, specular)
        D_rendered = self.tf_D(roughness, clamp(NdotH))
        G_rendered = self.tf_G(roughness, clamp(NdotL), clamp(NdotV))
        F_rendered = self.tf_F(specular, clamp(VdotH))

        result = F_rendered * (G_rendered * D_rendered * 0.25)
        if self.includeDiffuse:
            result = result + diffuse_rendered
        return result, NdotL
Example #5
0
def deprocess_images(inputs, targets, nbTargets):
    """Concatenate inputs and deprocessed targets into one display tensor.

    The targets are deprocessed and laid out as a horizontal strip; the
    inputs have their two leading batch-like dimensions flattened into one,
    then the target strip is appended on the width axis and the whole
    tensor is converted for saving.

    Args:
        inputs: 5-D tensor; dims 0 and 1 are flattened together, dims 2-4
            are kept (height, width, channels) — TODO confirm with callers.
        targets: ground-truth maps, still in network output range.
        nbTargets: unused here; kept for signature compatibility with callers.

    Returns:
        A single converted tensor ready to be saved/displayed.
    """
    targets = helpers.deprocess(targets)

    with tf.name_scope("transform_images"):
        # Removed unused local `targetShape` (was never read).
        targets_reshaped = concat_tensor_display(targets,
                                                 axisToConcat=2,
                                                 axisToSplit=1)
        # Query the static shape once instead of four separate calls.
        inputShape = inputs.get_shape()
        inputs_reshaped = tf.reshape(inputs, [
            -1,
            int(inputShape[2]),
            int(inputShape[3]),
            int(inputShape[4]),
        ])

        tensorToSave = tf.concat([inputs_reshaped, targets_reshaped], axis=2)
    with tf.name_scope("convert_images"):
        tensorToSave = helpers.convert(tensorToSave)

    return tensorToSave