# Assumed context for the listings below: these grad() methods belong to
# Theano's 3D convolution Ops, and the helper names resolve as in Theano:
import theano.tensor as T
from theano.gradient import DisconnectedType, grad_undefined
from theano.tensor.nnet.Conv3D import conv3D
from theano.tensor.nnet.ConvGrad3D import convGrad3D
from theano.tensor.nnet.ConvTransp3D import convTransp3D


# ConvTransp3D.grad, variant that signals non-differentiable inputs by
# returning None.
def grad(self, inputs, output_gradients):
    W, b, d, H, RShape = inputs
    dCdR, = output_gradients
    dCdH = conv3D(dCdR, W, T.zeros_like(H[0, 0, 0, 0, :]), d)
    WShape = W.shape
    dCdW = convGrad3D(dCdR, d, WShape, H)
    dCdb = T.sum(dCdR, axis=(0, 1, 2, 3))
    dCdd = None  # not differentiable, since d is not continuous
    dCdRShape = None  # not differentiable, since RShape is not continuous

    if 'name' in dir(dCdR) and dCdR.name is not None:
        dCdR_name = dCdR.name
    else:
        dCdR_name = 'anon'

    if 'name' in dir(H) and H.name is not None:
        H_name = H.name
    else:
        H_name = 'anon'

    if 'name' in dir(W) and W.name is not None:
        W_name = W.name
    else:
        W_name = 'anon'

    if 'name' in dir(b) and b.name is not None:
        b_name = b.name
    else:
        b_name = 'anon'

    dCdW.name = ('ConvTransp3D_dCdW.H=' + H_name + ',dCdR=' + dCdR_name +
                 ',W=' + W_name)
    dCdb.name = ('ConvTransp3D_dCdb.H=' + H_name + ',dCdR=' + dCdR_name +
                 ',W=' + W_name + ',b=' + b_name)
    dCdH.name = 'ConvTransp3D_dCdH.H=' + H_name + ',dCdR=' + dCdR_name

    return [dCdW, dCdb, dCdd, dCdH, dCdRShape]
# ConvGrad3D.grad, variant that signals non-differentiable inputs by
# returning None.
def grad(self, inputs, output_gradients):
    C, d, WShape, B = inputs
    dLdA, = output_gradients
    z = T.zeros_like(C[0, 0, 0, 0, :])
    dLdC = convTransp3D(dLdA, z, d, B, C.shape[1:4])
    dLdd = None  # not differentiable, since d is not continuous
    dLdWShape = None  # not differentiable, since WShape is not continuous
    dLdB = conv3D(C, dLdA, T.zeros_like(B[0, 0, 0, 0, :]), d)
    return [dLdC, dLdd, dLdWShape, dLdB]
# ConvGrad3D.grad, variant using grad_undefined / DisconnectedType.
def grad(self, inputs, output_gradients):
    C, d, WShape, B = inputs
    dLdA, = output_gradients
    z = T.zeros_like(C[0, 0, 0, 0, :])
    dLdC = convTransp3D(dLdA, z, d, B, C.shape[1:4])
    # d actually does affect the outputs, so it's not disconnected
    dLdd = grad_undefined(self, 1, d)
    # The shape of the weights doesn't affect the output elements
    dLdWShape = DisconnectedType()()
    dLdB = conv3D(C, dLdA, T.zeros_like(B[0, 0, 0, 0, :]), d)
    return [dLdC, dLdd, dLdWShape, dLdB]
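# A DisconnectedType gradient is normally paired with a connection_pattern
# method telling theano.grad which inputs can influence which outputs.
# A minimal sketch of that companion method for the def above (assuming
# Theano's convention of one inner bool per output; illustrative, not
# necessarily the library's actual code):
def connection_pattern(self, node):
    # Inputs are ordered (C, d, WShape, B).  C, d, and B affect the
    # output values; WShape only fixes the output's shape, so it is
    # reported as disconnected.
    return [[True], [True], [False], [True]]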
# ConvTransp3D.grad, variant using grad_undefined / DisconnectedType.
def grad(self, inputs, output_gradients):
    W, b, d, H, RShape = inputs
    dCdR, = output_gradients
    dCdH = conv3D(dCdR, W, T.zeros_like(H[0, 0, 0, 0, :]), d)
    WShape = W.shape
    dCdW = convGrad3D(dCdR, d, WShape, H)
    dCdb = T.sum(dCdR, axis=(0, 1, 2, 3))
    # not differentiable, since d affects the output elements
    dCdd = grad_undefined(self, 2, d)
    # disconnected, since RShape just determines the output shape
    dCdRShape = DisconnectedType()()

    if 'name' in dir(dCdR) and dCdR.name is not None:
        dCdR_name = dCdR.name
    else:
        dCdR_name = 'anon_dCdR'

    if 'name' in dir(H) and H.name is not None:
        H_name = H.name
    else:
        H_name = 'anon_H'

    if 'name' in dir(W) and W.name is not None:
        W_name = W.name
    else:
        W_name = 'anon_W'

    if 'name' in dir(b) and b.name is not None:
        b_name = b.name
    else:
        b_name = 'anon_b'

    dCdW.name = ('ConvTransp3D_dCdW.H=' + H_name + ',dCdR=' + dCdR_name +
                 ',W=' + W_name)
    dCdb.name = ('ConvTransp3D_dCdb.H=' + H_name + ',dCdR=' + dCdR_name +
                 ',W=' + W_name + ',b=' + b_name)
    dCdH.name = 'ConvTransp3D_dCdH.H=' + H_name + ',dCdR=' + dCdR_name

    return [dCdW, dCdb, dCdd, dCdH, dCdRShape]
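# The four if/else blocks above (and in the None-returning variant) repeat
# one lookup.  A small hypothetical helper, not in the original code, that
# could replace them:
def _name_or(var, default='anon'):
    """Return var.name when it is set, otherwise the given default."""
    name = getattr(var, 'name', None)
    return name if name is not None else default

# Usage (hypothetical): dCdR_name = _name_or(dCdR, 'anon_dCdR')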
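# A hedged end-to-end check (not part of the ops above): conv3D's own grad
# is built from convGrad3D and convTransp3D, so numerically verifying
# conv3D also exercises the grad() methods in this listing.  The shapes
# below are hypothetical tiny values chosen only to satisfy the op's
# layout (batch, row, column, time, channel).
import numpy as np
import theano

rng = np.random.RandomState(42)
V_val = rng.randn(1, 3, 3, 3, 1)   # 1 example, 3x3x3 volume, 1 channel
W_val = rng.randn(2, 2, 2, 2, 1)   # 2 filters of size 2x2x2
b_val = np.zeros(2)                # one bias per filter
d_val = np.ones(3, dtype='int64')  # unit strides, held fixed


def check_conv3D_grad():
    def f(V, W, b):
        # Only V, W, b are differentiated; the discrete stride d is constant.
        return conv3D(V, W, b, d_val)
    theano.gradient.verify_grad(f, [V_val, W_val, b_val], rng=rng)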