Example #1
    def dc(self, ind=None):
        # Jacobian of the nonlinear inequality constraints c at the current point x.
        # The result is computed once and cached in self._dc; ind optionally selects
        # a subset of constraint rows.
        if not self.p.userProvided.c: return empty_arr.copy().reshape(0, self.p.n)
        if ind is None:
            if not hasattr(self, '_dc'): self._dc = self.p.dc(self.x)
            return Copy(self._dc)
        else:
            if hasattr(self, '_dc'): return Copy(self._dc[ind])
            else: return Copy(self.p.dc(self.x, ind))
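The method above relies on a compute-once, cache-in-attribute idiom. Below is a minimal self-contained sketch of that idiom under assumed names: Problem, its toy dc Jacobian, and the simplified Point are hypothetical stand-ins for the OpenOpt objects, not their actual API.

import numpy as np

class Problem:
    # hypothetical stand-in: dc returns the constraint Jacobian at x
    def dc(self, x, ind=None):
        J = np.vstack([2 * x, np.ones_like(x)])   # e.g. rows for c1 = x.x - 1, c2 = sum(x)
        return J if ind is None else J[ind]

class Point:
    def __init__(self, p, x):
        self.p, self.x = p, np.asarray(x, float)

    def dc(self, ind=None):
        # compute once, cache, and hand out copies (mirrors the hasattr pattern above)
        if not hasattr(self, '_dc'):
            self._dc = self.p.dc(self.x)
        return (self._dc if ind is None else self._dc[ind]).copy()

pt = Point(Problem(), [1.0, 2.0])
print(pt.dc())         # full Jacobian, computed and cached on this first call
print(pt.dc(ind=[0]))  # row subset, served from the cache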
Example #2
    def sum_of_all_active_constraints(self):
        # Total violation of the active constraints at this point: positive parts of the
        # nonlinear inequalities c, the parts of the equalities h exceeding the tolerance
        # tol in either direction, plus the linear constraints residual.
        if not hasattr(self, '_sum_of_all_active_constraints'):
            p = self.p
            if p.solver.__name__ == 'ralg':
                tol = p.contol / 2.0
            else:
                tol = 0.0

            c, h = self.c(), self.h()
            all_lin = self.all_lin()
            self._sum_of_all_active_constraints = (c[c > 0]).sum() + (h[h > tol] - tol).sum() \
                - (h[h < -tol] + tol).sum() + all_lin
        return Copy(self._sum_of_all_active_constraints)
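The cached expression itself is plain NumPy arithmetic over the constraint values. A small sketch under assumed inputs, where the arrays c and h and the scalar lin_residual stand in for self.c(), self.h() and self.all_lin():

import numpy as np

c = np.array([-0.5, 0.2, 1.3])      # nonlinear inequalities, active where c > 0
h = np.array([0.04, -0.09, 0.001])  # nonlinear equalities, active where |h| > tol
tol = 0.05                          # e.g. p.contol / 2.0 for the ralg solver
lin_residual = 0.0                  # stands in for self.all_lin()

total = (c[c > 0]).sum() \
      + (h[h > tol] - tol).sum() \
      - (h[h < -tol] + tol).sum() \
      + lin_residual
print(total)   # 0.2 + 1.3 + 0.0 + (0.09 - 0.05) = 1.54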
Example #3
    def sum_of_all_active_constraints_gradient(self):
        # Gradient of the summed violation of all active constraints, computed once
        # and cached. For the ralg solver (new = 1) each constraint gradient is rescaled
        # by its violation magnitude; for gsubg (new = 0) the raw gradients are summed.
        if not hasattr(self, '_sum_of_all_active_constraints_gradient'):
            p = self.p
            contol = p.contol
            x = self.x
            direction = self.all_lin_gradient()
            if p.solver.__name__ == 'ralg':
                new = 1
            elif p.solver.__name__ == 'gsubg':
                new = 0
            else:
                p.err('unhandled case in Point._getDirection')
                
            if p.userProvided.c:
                # violated nonlinear inequality constraints: c(x) > th
                th = 0.0
                #th = contol / 2.0
                C = p.c(x)
                Ind = C > th
                ind = where(Ind)[0]
                activeC = asarray(C[Ind])# asarray and Ind for PyPy compatibility
                if len(ind) > 0:
                    tmp = p.dc(x, ind)

                    if new:
                        if tmp.ndim == 1 or min(tmp.shape) == 1:
                            if hasattr(tmp, 'toarray'): 
                                tmp = tmp.toarray()#.flatten()
                            if activeC.size == prod(tmp.shape):
                                activeC = activeC.reshape(tmp.shape)
                            tmp *= (activeC-th*(1.0-1e-15))/norm(tmp)
                        else:
                            if hasattr(tmp, 'toarray'):
                                tmp = tmp.toarray()
                            tmp *= ((activeC - th*(1.0-1e-15))/sqrt((tmp**2).sum(1))).reshape(-1, 1)
                            
                    if tmp.ndim > 1:
                        tmp = tmp.sum(0)
                    direction += (tmp.A if type(tmp) != ndarray else tmp).flatten()
            

            if p.userProvided.h:
                # nonlinear equality constraints violated above +th
                #th = 0.0
                th = contol / 2.0
                H = p.h(x)
                Ind1 = H > th
                ind1 = where(Ind1)[0]
                H1 = asarray(H[Ind1])# asarray and Ind1 for PyPy compatibility
                if len(ind1) > 0:
                    tmp = p.dh(x, ind1)
                    
                    if new:
                        if tmp.ndim == 1 or min(tmp.shape) == 1:
                            if hasattr(tmp, 'toarray'): 
                                tmp = tmp.toarray()#.flatten()
                            if H1.size == prod(tmp.shape):
                                H1 = H1.reshape(tmp.shape)
                            tmp *= (H1-th*(1.0-1e-15))/norm(tmp)
                        else:
                            if hasattr(tmp, 'toarray'):
                                tmp = tmp.toarray()
                            tmp *= ((H1 - th*(1.0-1e-15))/sqrt((tmp**2).sum(1))).reshape(-1, 1)
                    
                    if tmp.ndim > 1: 
                        tmp = tmp.sum(0)
                    direction += (tmp.A if isspmatrix(tmp) or hasattr(tmp, 'toarray') else tmp).flatten()
                # equality constraints violated below -th contribute with opposite sign
                ind2 = where(H < -th)[0]
                H2 = asarray(H[ind2])  # asarray for PyPy compatibility, as above
                if len(ind2) > 0:
                    tmp = p.dh(x, ind2)
                    if new:
                        if tmp.ndim == 1 or min(tmp.shape) == 1:
                            if hasattr(tmp, 'toarray'): 
                                tmp = tmp.toarray()#.flatten()
                            if H2.size == prod(tmp.shape):
                                H2 = H2.reshape(tmp.shape)                                    
                            tmp *= (-H2-th*(1.0-1e-15))/norm(tmp)
                        else:
                            if hasattr(tmp, 'toarray'):
                                tmp = tmp.toarray()
                            tmp *= ((-H2 - th*(1.0-1e-15))/sqrt((tmp**2).sum(1))).reshape(-1, 1)
                    
                    if tmp.ndim > 1: 
                        tmp = tmp.sum(0)
                    direction -= (tmp.A if type(tmp) != ndarray else tmp).flatten()
            self._sum_of_all_active_constraints_gradient = direction
        return Copy(self._sum_of_all_active_constraints_gradient)
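For each constraint group the loop does the same thing: take the Jacobian rows of the violated constraints and, in the ralg branch (new = 1), rescale each row by its violation divided by the row norm before summing into the direction. A minimal NumPy sketch with made-up values; C, J and th are hypothetical stand-ins for p.c(x), p.dc(x, ind) and the threshold, and the 1e-15 safeguard factor is omitted:

import numpy as np

# Hypothetical data: values and Jacobian rows of three inequality constraints at x.
C = np.array([0.3, -0.1, 0.7])              # constraint values; active where C > th
J = np.array([[1.0, 0.0],
              [0.0, 1.0],
              [3.0, 4.0]])                  # dc/dx, one row per constraint
th = 0.0
new = 1                                      # ralg-style rescaling, as in the snippet

ind = np.where(C > th)[0]                    # active constraints: indices 0 and 2
tmp = J[ind].astype(float)
if new:
    # scale each gradient row by its violation divided by the row norm
    scale = (C[ind] - th) / np.sqrt((tmp ** 2).sum(1))
    tmp *= scale.reshape(-1, 1)
direction = tmp.sum(0)                       # contribution added to the direction vector
print(direction)   # [0.3, 0.0] + 0.7/5 * [3, 4] = [0.72, 0.56]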
Example #4
    def df(self):
        # Objective gradient at the current point, computed once and cached in self._df.
        if not hasattr(self, '_df'): self._df = self.p.df(self.x)
        return Copy(self._df)