Example #1
    def grad(self, inputs, output_gradients):
        C, d, WShape, B = inputs
        dLdA, = output_gradients

        z = T.zeros_like(C[0, 0, 0, 0, :])
        dLdC = convTransp3D(dLdA, z, d, B, C.shape[1:4])
        dLdd = None  # not differentiable, since d is not continuous
        dLdWShape = None  # not differentiable, since WShape is not continuous
        dLdB = conv3D(C, dLdA, T.zeros_like(B[0, 0, 0, 0, :]), d)

        return [dLdC, dLdd, dLdWShape, dLdB]
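Example #1 signals "no gradient" by returning None for both discrete inputs. Example #2 below makes the distinction between the two cases explicit: grad_undefined(self, 1, d) marks the gradient with respect to d as undefined (d does affect the output, but only takes integer values, so there is no meaningful derivative), while DisconnectedType()() marks WShape as disconnected, since the output values do not depend on it at all.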
Example #2
    def grad(self, inputs, output_gradients):
        C, d, WShape, B = inputs
        dLdA, = output_gradients

        z = T.zeros_like(C[0, 0, 0, 0, :])
        dLdC = convTransp3D(dLdA, z, d, B, C.shape[1:4])
        # d actually does affect the outputs, so it's not disconnected
        dLdd = grad_undefined(self, 1, d)
        # The shape of the weights doesn't affect the output elements
        dLdWShape = DisconnectedType()()
        dLdB = conv3D(C, dLdA, T.zeros_like(B[0, 0, 0, 0, :]), d)

        return [dLdC, dLdd, dLdWShape, dLdB]
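When an Op's grad returns DisconnectedType()() for an input, Theano's gradient machinery also consults the Op's connection_pattern method to learn which inputs can affect which outputs. The sketch below shows what that override could look like for this Op's four inputs and single output; it is an assumption inferred from the grad above, not code taken from the example's source.

    def connection_pattern(self, node):
        # pattern[i][j] is True iff input i can affect output j.
        # Only WShape (input 2) is disconnected from the single output;
        # d stays connected even though its gradient is undefined.
        return [[True],   # C
                [True],   # d
                [False],  # WShape
                [True]]   # B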