示例#1
0
    def partGrad(self, partial, prevOp):
        """Backpropagate through this op.

        The local derivative is a ones tensor shaped like the left
        operand; it is chained into ``prevOp`` and pushed down into
        ``self.left``.  Results are memoized in ``self._grads`` when
        ``self.catch`` is enabled.

        Raises:
            Exception: if ``partial`` is not a Variable node.
        """
        if partial.type != "Variable":
            raise Exception("partial参数必须是Variable类型")
        cached = self.catch and self._grads.get(partial.name, None)
        if cached:
            return cached
        if prevOp is None:
            prevOp = Constant(dim.ones(self.eval().shape))

        localGrad = Constant(dim.ones(self.left.eval().shape))
        chained = dim.autograd.MulOperate.wrapper(localGrad, prevOp)
        rst = self.left.partGrad(partial, chained)
        self._grads[partial.name] = rst
        return rst
示例#2
0
 def partGrad(self,partial,prevOp):
   """Gradient of an elementwise multiply node (product rule).

   d(L*R) = R * dL + L * dR, with ``prevOp`` chained into each branch.
   Per-operand chain terms are memoized in ``self._grads`` under the
   operand names when ``self.catch`` is enabled.

   Raises:
     Exception: if ``partial`` is not a Variable node.
   """
   if (partial.type!="Variable"): raise Exception("partial参数必须是Variable类型")
   if (self.catch and self._grads.get(partial.name,None)): return self._grads[partial.name]
   if (prevOp is None):
     # Seed the backward pass with a ones tensor matching this node's
     # value; dim.Vector data gets CPU ones, anything else cl ones.
     data=self.eval()
     if isinstance(data,dim.Vector):
       prevOp=Constant(dim.ones(data.shape))
     else:
       prevOp=Constant(dim.cl.ones(data.shape))
   # Chain term for the left operand: R * prevOp (cached by name).
   if (self.catch and self._grads.get(self.left.name,None)):
     part1=self._grads[self.left.name]
   else:
     part1 = dim.autograd.MulOperate.wrapper(self.right,prevOp)
     self._grads[self.left.name]=part1
   part2 = self.left.partGrad(partial,part1)
   # Chain term for the right operand: L * prevOp (cached by name).
   if (self.catch and self._grads.get(self.right.name,None)):
     part3=self._grads[self.right.name]
   else:
     part3 = dim.autograd.MulOperate.wrapper(self.left,prevOp)
     self._grads[self.right.name]=part3
   part4 = self.right.partGrad(partial,part3)
   # (A triple-quoted block of superseded, uncached code was removed here.)
   rst = dim.autograd.AddOperate.wrapper(part2,part4)
   self._grads[partial.name]=rst
   return rst
示例#3
0
 def partGrad(self,partial,prevOp):
   """Gradient hook for this operator — derivative rule not written yet.

   Validates arguments and seeds ``prevOp`` exactly like the sibling
   operators, then raises.

   Raises:
     Exception: if ``partial`` is not a Variable node.
     NotImplementedError: always, once validation passes.
   """
   if (partial.type!="Variable"): raise Exception("partial参数必须是Variable类型")
   if (self.catch and self._grads.get(partial.name,None)): return self._grads[partial.name]
   if (prevOp is None): prevOp=Constant(dim.ones(self.eval().shape))
   # Use the idiomatic exception type for a missing implementation; the
   # two statements that followed the raise were unreachable and dropped.
   raise NotImplementedError("not implemented")
示例#4
0
    def partGrad(self, partial, prevOp):
        """Gradient of a matrix-product (dot) node.

        d(L·R)/dL = prevOp · Rᵀ and d(L·R)/dR = Lᵀ · prevOp; each chain
        term is pushed into the corresponding operand's ``partGrad`` and
        the two paths are summed.  Per-operand chain terms are memoized
        in ``self._grads`` when ``self.catch`` is enabled.

        Raises:
            Exception: if ``partial`` is not a Variable node.
        """
        if (partial.type != "Variable"):
            raise Exception("partial参数必须是Variable类型")
        if (self.catch and self._grads.get(partial.name, None)):
            return self._grads[partial.name]
        if (prevOp is None):
            # Seed the backward pass with ones shaped like this node's
            # value; dim.Vector gets CPU ones, anything else cl ones.
            data = self.eval()
            if isinstance(data, dim.Vector):
                prevOp = Constant(dim.ones(data.shape))
            else:
                prevOp = Constant(dim.cl.ones(data.shape))
        # NOTE: computing the chain terms eagerly via eval() would also
        # work, since the upstream backward result is already complete —
        # but then the dot formulas for each partial would no longer show
        # up in the full expression's gradFn.gradExpression().

        # Chain term for the left operand: prevOp · Rᵀ (cached by name).
        if (self.catch and self._grads.get(self.left.name, None)):
            dLeft = self._grads.get(self.left.name)
        else:
            tRight = Constant(self.right.eval().T)
            dLeft = dim.autograd.DotOperate.wrapper(prevOp, tRight)
            self._grads[self.left.name] = dLeft

        # Chain term for the right operand: Lᵀ · prevOp (cached by name).
        if (self.catch and self._grads.get(self.right.name, None)):
            dRight = self._grads.get(self.right.name)
        else:
            tLeft = Constant(self.left.eval().T)
            dRight = dim.autograd.DotOperate.wrapper(tLeft, prevOp)
            self._grads[self.right.name] = dRight

        # When the target IS one of the operands, use its chain term
        # directly instead of recursing into it.
        if (self.left.name == partial.name):
            part1 = dLeft
            part2 = self.right.partGrad(partial, dRight)
        elif (self.right.name == partial.name):
            part1 = self.left.partGrad(partial, dLeft)
            part2 = dRight
        else:
            part1 = self.left.partGrad(partial, dLeft)
            part2 = self.right.partGrad(partial, dRight)

        rst = dim.autograd.AddOperate.wrapper(part1, part2)
        self._grads[partial.name] = rst
        return rst
示例#5
0
 def partGrad(self,partial,prevOp):
   """Gradient of an elementwise divide node via the quotient rule:
   d(L/R) = (R*dL - L*dR) / R**2, with ``prevOp`` chained into each
   branch before recursing.

   Raises:
     Exception: if ``partial`` is not a Variable node.
   """
   if (partial.type!="Variable"): raise Exception("partial参数必须是Variable类型")
   if (self.catch and self._grads.get(partial.name,None)): return self._grads[partial.name]
   if (prevOp is None): prevOp=Constant(dim.ones(self.eval().shape))
   # numerator: R*dL - L*dR (each term chained through prevOp)
   leftChain = dim.autograd.MulOperate.wrapper(self.right,prevOp)
   dLeft = self.left.partGrad(partial,leftChain)
   rightChain = dim.autograd.MulOperate.wrapper(self.left,prevOp)
   dRight = self.right.partGrad(partial,rightChain)
   numerator = dim.autograd.SubOperate.wrapper(dLeft,dRight)
   # denominator: R squared
   denominator = dim.autograd.PowOperate.wrapper(self.right,Constant(2))
   rst = dim.autograd.DivOperate.wrapper(numerator,denominator)
   self._grads[partial.name]=rst
   return rst
示例#6
0
文件: variable.py 项目: youht88/dimpy
    def partGrad(self, partial=None, prevOp=None):
        """Base case of the backward pass for a Variable leaf.

        Returns ``prevOp`` when differentiating with respect to this very
        variable, and ``Constant(0)`` for any other variable.  The result
        is stored in ``self._grads`` under this node's own name.

        Args:
            partial: the Variable being differentiated against.  The
                original default was a mutable ``{}`` — an anti-pattern
                that could never satisfy the Variable check anyway; a
                ``None`` default now fails the same validation explicitly.
            prevOp: upstream gradient; seeded with a ones Constant
                matching this node's value when omitted.

        Raises:
            Exception: if ``partial`` is not a Variable node.
        """
        if (partial is None or partial.type != "Variable"):
            raise Exception("partial参数必须是Variable类型")
        if (prevOp is None):
            data = self.eval()
            if isinstance(data, dim.Vector):
                prevOp = Constant(dim.ones(data.shape))
            elif isinstance(data, dim.cl.Array):
                prevOp = Constant(dim.cl.ones(data.shape))
            # NOTE(review): if data is neither dim.Vector nor dim.cl.Array,
            # prevOp remains None here — confirm all value types are covered.

        if (partial.name == self.name):
            # d(self)/d(self): pass the upstream gradient straight through.
            # (A large triple-quoted block of superseded scalar/vector/matrix
            # case handling was removed here.)
            rst = prevOp
        else:
            # Derivative with respect to a different variable is zero.
            rst = Constant(0)
        # NOTE(review): sibling operators cache under partial.name; this
        # leaf caches under self.name — confirm the asymmetry is intended.
        self._grads[self.name] = rst
        return rst
示例#7
0
    def partGrad(self, partial, prevOp):
        """Gradient of an elementwise add node: ``prevOp`` flows
        unchanged into both operands and the two branch gradients are
        summed.  Memoized in ``self._grads`` when ``self.catch`` is set.

        Raises:
            Exception: if ``partial`` is neither a Variable nor an
                Operate node.
        """
        if partial.type not in ["Variable", "Operate"]:
            raise Exception("partial参数必须是Variable、Operate类型")
        cached = self.catch and self._grads.get(partial.name, None)
        if cached:
            return cached
        if prevOp is None:
            value = self.eval()
            # dim.Vector values get CPU ones, anything else cl ones
            onesLike = dim.ones if isinstance(value, dim.Vector) else dim.cl.ones
            prevOp = Constant(onesLike(value.shape))
        gradLeft = self.left.partGrad(partial, prevOp)
        gradRight = self.right.partGrad(partial, prevOp)

        rst = dim.autograd.AddOperate.wrapper(gradLeft, gradRight)
        self._grads[partial.name] = rst

        return rst
示例#8
0
    def partGrad(self, partial, prevOp):
        """Gradient of a power node with respect to its base:
        d(L**R) = R * L**(R-1) * prevOp, chained into ``self.left``.

        NOTE(review): the exponent branch (``self.right``) is never
        differentiated here — presumably it is treated as a constant;
        confirm against the operator's intended use.

        Raises:
            Exception: if ``partial`` is not a Variable node.
        """
        if partial.type != "Variable":
            raise Exception("partial参数必须是Variable类型")
        cached = self.catch and self._grads.get(partial.name, None)
        if cached:
            return cached
        if prevOp is None:
            prevOp = Constant(dim.ones(self.eval().shape))

        chain = self.catch and self._grads.get(self.left.name, None)
        if not chain:
            # R * L**(R-1), then chained into the upstream gradient
            exponent = Constant(self.right.eval() - 1)
            powered = dim.autograd.PowOperate.wrapper(self.left, exponent)
            scaled = dim.autograd.MulOperate.wrapper(self.right, powered)
            chain = dim.autograd.MulOperate.wrapper(scaled, prevOp)
            self._grads[self.left.name] = chain

        rst = self.left.partGrad(partial, chain)
        self._grads[partial.name] = rst
        return rst
示例#9
0
 def partGrad(self,partial,prevOp):
   """Gradient of a 1-D convolution node.

   The input (left) branch receives a transposed convolution of the
   upstream gradient with the kernel; the kernel (right) branch is a
   conv1d of the channel-swapped input with the channel-swapped
   upstream gradient.

   Raises:
     Exception: if ``partial`` is not a Variable node.
   """
   if (partial.type!="Variable"): raise Exception("partial参数必须是Variable类型")
   if (self.catch and self._grads.get(partial.name,None)): return self._grads[partial.name]
   if (prevOp is None): prevOp=Constant(dim.ones(self.eval().shape))

   # dL: transposed conv of the upstream grad with the kernel
   dLeft=dim.autograd.ConvTranspose1dOperate.wrapper(prevOp,self.right,self.args)
   # dR: conv1d of channel-swapped input with channel-swapped grad,
   # swapped back afterwards
   gradSwapped = prevOp.eval().swapaxes(0,1)
   inputSwapped = self.left.eval().swapaxes(0,1)
   kernelGrad = dim.nn.functional.conv1d(inputSwapped,gradSwapped)
   dRight = Constant(kernelGrad.swapaxes(0,1))
   # When the target IS one of the operands, use its chain term
   # directly instead of recursing into it.
   if (self.left.name==partial.name):
     part1 = dLeft
     part2 = self.right.partGrad(partial,dRight)
   elif (self.right.name==partial.name):
     part1 = dRight
     part2 = self.left.partGrad(partial,dLeft)
   else:
     part1 = self.left.partGrad(partial,dLeft)
     part2 = self.right.partGrad(partial,dRight)

   rst = dim.autograd.AddOperate.wrapper(part1,part2)
   self._grads[partial.name]=rst
   return rst