Example #1
    def step(self, x, last_b):
        # initialize
        m = len(x)
        mu = np.matrix(last_b).T
        sigma = self.sigma
        theta = self.theta
        eps = self.eps
        x = np.matrix(x).T    # column matrices are easier to manipulate

        # 4. Calculate the following variables
        M = mu.T * x                                              # mean of the portfolio return b'x
        V = x.T * sigma * x                                       # variance of the portfolio return
        x_upper = np.sum(np.diag(sigma) * x) / np.trace(sigma)    # confidence-weighted average price relative

        # 5. Update the portfolio distribution
        mu, sigma = self.update(x, x_upper, mu, sigma, M, V, theta, eps)

        # 6. Normalize mu and sigma
        mu = tools.simplex_proj(mu)
        sigma = sigma / (m**2 * np.trace(sigma))
        # The MATLAB reference implementation also floors tiny variances:
        #   sigma(sigma < 1e-4*eye(m)) = 1e-4;
        self.sigma = sigma
        return mu
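The step above assumes the object already carries the distribution parameters (self.sigma, self.theta, self.eps) and an update method that solves the CWMR optimization, neither of which is shown. A minimal initialization sketch follows; the attribute names mirror the snippet, while the class name and the default values (confidence 0.95, eps = -0.5) are illustrative assumptions, not documented library defaults.

import numpy as np
import scipy.stats

class CWMRState:
    # Hypothetical holder for the state used in Example #1.
    def __init__(self, m, confidence=0.95, eps=-0.5):
        self.sigma = np.matrix(np.eye(m)) / m**2        # initial covariance of the weight distribution
        self.theta = scipy.stats.norm.ppf(confidence)   # quantile matching the confidence level
        self.eps = eps                                   # mean-reversion threshold

state = CWMRState(m=3)
print(state.theta)   # about 1.645 for 95% confidence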
Example #2
    def update(self, b, x, eps):
        """ Update portfolio weights to satisfy constraint b * x >= eps
        and minimize distance to previous weights. """
        x_mean = np.mean(x)
        # closed-form step size (Lagrange multiplier) of the passive-aggressive update
        lam = max(0., (eps - np.dot(b, x)) / np.linalg.norm(x - x_mean)**2)

        # limit lambda to avoid numerical problems
        lam = min(100000, lam)

        # update portfolio
        b = b + lam * (x - x_mean)

        # project it onto simplex
        return tools.simplex_proj(b)
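The examples finish by calling tools.simplex_proj, which is not shown. Below is a minimal sketch of such a projection, using the standard sort-based Euclidean projection onto the probability simplex; the function name and signature are assumptions, not necessarily the library's API.

import numpy as np

def simplex_proj(y):
    # Project y onto {b : b_i >= 0, sum(b) = 1} (sort-based algorithm).
    y = np.asarray(y, dtype=float).ravel()
    m = len(y)
    u = np.sort(y)[::-1]                  # sort in descending order
    css = np.cumsum(u)                    # running sums of the sorted values
    rho = np.nonzero(u * np.arange(1, m + 1) > css - 1)[0][-1]
    theta = (css[rho] - 1.0) / (rho + 1.0)
    return np.maximum(y - theta, 0.0)

print(simplex_proj(np.array([0.5, 0.6, -0.1])))   # -> [0.45 0.55 0.  ], nonnegative and summing to 1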
Example #3
    def update(self, b, x, eps, C):
        """ Update portfolio weights to satisfy constraint b * x <= eps
        and minimize distance to previous weights. """
        x_mean = np.mean(x)
        le = max(0., np.dot(b, x) - eps)    # hinge loss of the current weights

        if self.variant == 0:       # plain passive-aggressive step
            lam = le / np.linalg.norm(x - x_mean)**2
        elif self.variant == 1:     # cap the step size at C
            lam = min(C, le / np.linalg.norm(x - x_mean)**2)
        elif self.variant == 2:     # soften the step with the 1/(2C) term
            lam = le / (np.linalg.norm(x - x_mean)**2 + 0.5 / C)
        else:
            raise ValueError('variant must be 0, 1 or 2')

        # limit lambda to avoid numerical problems
        lam = min(100000, lam)

        # update portfolio
        b = b - lam * (x - x_mean)

        # project it onto simplex
        return tools.simplex_proj(b)
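A toy one-step check of the plain variant (variant == 0) with illustrative numbers; it mirrors the body of the method above without the surrounding class and stops just before the simplex projection.

import numpy as np

b = np.array([0.5, 0.3, 0.2])        # current weights
x = np.array([1.05, 0.97, 1.10])     # last period's price relatives
eps = 1.0                            # mean-reversion threshold

x_mean = np.mean(x)
le = max(0.0, np.dot(b, x) - eps)               # hinge loss, here 0.036
lam = le / np.linalg.norm(x - x_mean) ** 2      # step size, here about 4.19
b_new = b - lam * (x - x_mean)                  # shift away from assets that just rose

print(b_new, b_new.sum())   # still sums to 1, but one weight is negative,
                            # which is why the final simplex projection is needed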