def grad(self, inputs, output_gradients):
    """Compute gradients of the cost w.r.t. the inputs (W, b, d, H, RShape).

    Only W, b and H carry gradients. d (the stride) and RShape (the
    requested output shape) are discrete inputs: their gradients are
    reported as undefined / disconnected rather than ``None``, matching
    the other ``grad`` implementation in this file.

    Returns [dCdW, dCdb, dCdd, dCdH, dCdRShape].
    """
    W, b, d, H, RShape = inputs
    dCdR, = output_gradients
    # Gradient w.r.t. H: forward 3D convolution of the output gradient
    # with W, using a zero bias matching H's channel axis dtype/length.
    dCdH = conv3D(dCdR, W, T.zeros_like(H[0, 0, 0, 0, :]), d)
    WShape = W.shape
    dCdW = convGrad3D(dCdR, d, WShape, H)
    dCdb = T.sum(dCdR, axis=(0, 1, 2, 3))
    # d is not continuous, so its gradient is undefined (input index 2).
    dCdd = grad_undefined(self, 2, d)
    # RShape only selects the output shape; it is disconnected from the cost.
    dCdRShape = DisconnectedType()()

    def _debug_name(var, default):
        # Tolerate operands without a usable .name attribute.
        name = getattr(var, 'name', None)
        return default if name is None else name

    dCdR_name = _debug_name(dCdR, 'anon')
    H_name = _debug_name(H, 'anon')
    W_name = _debug_name(W, 'anon')
    b_name = _debug_name(b, 'anon')

    # Attach debug names so the computed gradients are traceable in graphs.
    dCdW.name = 'ConvTransp3D_dCdW.H=' + H_name + ',dCdR=' + dCdR_name + ',W=' + W_name
    dCdb.name = 'ConvTransp3D_dCdb.H=' + H_name + ',dCdR=' + dCdR_name + ',W=' + W_name + ',b=' + b_name
    dCdH.name = 'ConvTransp3D_dCdH.H=' + H_name + ',dCdR=' + dCdR_name
    return [dCdW, dCdb, dCdd, dCdH, dCdRShape]
def grad(self, inputs, output_gradients):
    """Return gradients of the cost w.r.t. (W, b, d, H, RShape).

    W, b and H get symbolic gradients; d is undefined (discrete stride)
    and RShape is disconnected (it only fixes the output shape).
    """
    W, b, d, H, RShape = inputs
    dCdR, = output_gradients

    # Gradient w.r.t. H via a forward 3D convolution with a zero bias
    # shaped like H's channel axis.
    zero_bias = T.zeros_like(H[0, 0, 0, 0, :])
    dCdH = conv3D(dCdR, W, zero_bias, d)
    dCdW = convGrad3D(dCdR, d, W.shape, H)
    dCdb = T.sum(dCdR, axis=(0, 1, 2, 3))
    # not differentiable, since d affects the output elements
    dCdd = grad_undefined(self, 2, d)
    # disconnected, since RShape just determines the output shape
    dCdRShape = DisconnectedType()()

    def _label(var, fallback):
        # Prefer the variable's debug name when one is set.
        name = getattr(var, 'name', None)
        return fallback if name is None else name

    dCdR_name = _label(dCdR, 'anon_dCdR')
    H_name = _label(H, 'anon_H')
    W_name = _label(W, 'anon_W')
    b_name = _label(b, 'anon_b')

    # Tag each gradient with a descriptive debug name.
    dCdW.name = ('ConvTransp3D_dCdW.H=' + H_name + ',dCdR=' + dCdR_name
                 + ',W=' + W_name)
    dCdb.name = ('ConvTransp3D_dCdb.H=' + H_name + ',dCdR=' + dCdR_name
                 + ',W=' + W_name + ',b=' + b_name)
    dCdH.name = 'ConvTransp3D_dCdH.H=' + H_name + ',dCdR=' + dCdR_name
    return [dCdW, dCdb, dCdd, dCdH, dCdRShape]
def grad(self, inputs, output_gradients):
    """Compute gradients of the cost w.r.t. the inputs (W, b, d, H, RShape).

    Only W, b and H carry gradients. d (the stride) and RShape (the
    requested output shape) are discrete inputs: their gradients are
    reported as undefined / disconnected rather than ``None``, matching
    the other ``grad`` implementation in this file.

    Returns [dCdW, dCdb, dCdd, dCdH, dCdRShape].
    """
    W, b, d, H, RShape = inputs
    dCdR, = output_gradients
    # Gradient w.r.t. H: forward 3D convolution of the output gradient
    # with W, using a zero bias matching H's channel axis dtype/length.
    dCdH = conv3D(dCdR, W, T.zeros_like(H[0, 0, 0, 0, :]), d)
    WShape = W.shape
    dCdW = convGrad3D(dCdR, d, WShape, H)
    dCdb = T.sum(dCdR, axis=(0, 1, 2, 3))
    # d is not continuous, so its gradient is undefined (input index 2).
    dCdd = grad_undefined(self, 2, d)
    # RShape only selects the output shape; it is disconnected from the cost.
    dCdRShape = DisconnectedType()()

    def _debug_name(var, default):
        # Tolerate operands without a usable .name attribute.
        name = getattr(var, 'name', None)
        return default if name is None else name

    dCdR_name = _debug_name(dCdR, 'anon')
    H_name = _debug_name(H, 'anon')
    W_name = _debug_name(W, 'anon')
    b_name = _debug_name(b, 'anon')

    # Attach debug names so the computed gradients are traceable in graphs.
    dCdW.name = 'ConvTransp3D_dCdW.H=' + H_name + ',dCdR=' + dCdR_name + ',W=' + W_name
    dCdb.name = 'ConvTransp3D_dCdb.H=' + H_name + ',dCdR=' + dCdR_name + ',W=' + W_name + ',b=' + b_name
    dCdH.name = 'ConvTransp3D_dCdH.H=' + H_name + ',dCdR=' + dCdR_name
    return [dCdW, dCdb, dCdd, dCdH, dCdRShape]