Example #1
    # Constructor of an elite-selection (CEM-style) optimizer.
    # Assumes numpy is imported as np; `opts` must supply the keys read below.
    def __init__(self, x0, sigma0, opts=None):
        self.x0 = x0
        self.sigma0 = sigma0
        self.popsize = opts['popsize']
        self.elite_ratio = opts['elite_ratio']
        # keep at least one elite, even for tiny populations
        self.elite_size = max(1, int(self.elite_ratio*self.popsize))

        # use the provided seed if given, otherwise draw one at random
        self.seed = opts['seed'] if 'seed' in opts else np.random.randint(1, 2**32)
        self.np_random = np.random.RandomState(self.seed)

        self.noise_scheduler = LinearSchedule(*opts['noise_scheduler_args'])
        self.iter = 0

        # initialize mean and std
        self.x = np.asarray(x0).astype(np.float32)
        self.shape = self.x.shape
        if np.isscalar(sigma0):
            self.sigma = np.full(self.shape, sigma0, dtype=np.float32)
        else:
            self.sigma = np.asarray(sigma0).astype(np.float32)

        self.xbest = None
        self.fbest = None
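
For context, here is a minimal sketch of how this constructor might be invoked. The class name CEM is hypothetical, but the opts keys are exactly the ones the snippet reads, and noise_scheduler_args follows the (initial, final, N, start) signature of LinearSchedule exercised in Example #3:

import numpy as np

opts = {
    'popsize': 64,                                  # candidates per iteration
    'elite_ratio': 0.5,                             # top fraction kept as elites
    'seed': 0,                                      # optional; drawn randomly if omitted
    'noise_scheduler_args': (0.01, 0.001, 100, 0),  # (initial, final, N, start)
}
es = CEM(x0=np.zeros(10), sigma0=1.0, opts=opts)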
Example #2
    # Constructor of an ES-style optimizer whose mean is updated by Adam.
    # Assumes numpy as np, torch, and torch.optim as optim are imported;
    # `opts` must supply the keys read below.
    def __init__(self, x0, sigma0, opts=None):
        self.x0 = x0
        self.sigma0 = sigma0
        self.popsize = opts['popsize']
        self.sigma_scheduler = LinearSchedule(*opts['sigma_scheduler_args'])
        self.lr = opts['lr']
        self.lr_decay = opts['lr_decay']
        self.min_lr = opts['min_lr']
        self.antithetic = opts['antithetic']
        if self.antithetic:
            # mirrored sampling draws noise in +/- pairs, so popsize must split evenly
            assert self.popsize % 2 == 0, 'popsize must be even for antithetic sampling'
        self.rank_transform = opts['rank_transform']

        # use the provided seed if given, otherwise draw one at random
        self.seed = opts['seed'] if 'seed' in opts else np.random.randint(1, 2**32)
        self.np_random = np.random.RandomState(self.seed)

        self.iter = 0

        # initialize mean and std; the mean is a torch tensor so Adam can update it
        self.x = torch.from_numpy(np.asarray(x0)).float()
        self.x.requires_grad = True
        self.shape = tuple(self.x.shape)
        if np.isscalar(sigma0):
            self.sigma = np.full(self.shape, sigma0, dtype=np.float32)
        else:
            self.sigma = np.asarray(sigma0).astype(np.float32)

        self.optimizer = optim.Adam([self.x], lr=self.lr)
        self.lr_scheduler = optim.lr_scheduler.ExponentialLR(self.optimizer, gamma=self.lr_decay)

        self.xbest = None
        self.fbest = None
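
The even-popsize assertion above comes from mirrored sampling: each noise vector is paired with its negation, so only half the population needs to be drawn. The sampling step itself is not shown; the following is a rough sketch, under the assumption that the class exposes an ask-style method, of what antithetic sampling would look like given these attributes:

    def ask(self):
        # draw half the population, then mirror it: eps and -eps share one
        # random draw, which lowers the variance of the ES gradient estimate
        half = self.np_random.randn(self.popsize // 2, *self.shape).astype(np.float32)
        eps = np.concatenate([half, -half], axis=0)
        # candidate solutions scattered around the current mean
        return self.x.detach().numpy() + self.sigma * eps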
Example #3
import pytest
# LinearSchedule is assumed importable from the package under test

def test_linear_schedule():
    # invalid constructor or step arguments must be rejected
    with pytest.raises(AssertionError):
        LinearSchedule(1.0, 0.1, 0, 0)       # N must be positive
    with pytest.raises(AssertionError):
        LinearSchedule(1.0, 0.1, -1, 0)      # N must not be negative
    with pytest.raises(AssertionError):
        LinearSchedule(1.0, 0.1, 10, -1)     # start must be non-negative
    with pytest.raises(AssertionError):
        LinearSchedule(1.0, 0.1, 10, 0)(-1)  # step index must be non-negative

    # increasing: without warmup start
    scheduler = LinearSchedule(initial=0.5, final=2.0, N=3, start=0)
    assert scheduler(0) == 0.5
    assert scheduler(1) == 1.0
    assert scheduler(2) == 1.5
    assert scheduler(3) == 2.0
    assert all([scheduler(i) == 2.0 for i in [4, 5, 6, 7, 8]])

    # increasing: with warmup start
    scheduler = LinearSchedule(initial=0.5, final=2.0, N=3, start=2)
    assert all([scheduler(i) == 0.5 for i in [0, 1, 2]])
    assert scheduler(3) == 1.0
    assert scheduler(4) == 1.5
    assert scheduler(5) == 2.0
    assert all([scheduler(i) == 2.0 for i in [6, 7, 8]])

    # decreasing: without warmup start
    scheduler = LinearSchedule(initial=1.0, final=0.1, N=3, start=0)
    assert scheduler(0) == 1.0
    assert scheduler(1) == 0.7
    assert scheduler(2) == 0.4
    assert scheduler(3) == 0.1
    assert all([scheduler(i) == 0.1 for i in [4, 5, 6]])

    # decreasing: with warmup start
    scheduler = LinearSchedule(initial=1.0, final=0.1, N=3, start=2)
    assert all([scheduler(i) == 1.0 for i in [0, 1, 2]])
    assert scheduler(3) == 0.7
    assert scheduler(4) == 0.4
    assert scheduler(5) == 0.1
    assert all([scheduler(i) == 0.1 for i in [6, 7, 8]])
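
The tests pin down the schedule completely: hold initial for the first start steps, move linearly to final over the next N steps, then hold final. A minimal LinearSchedule consistent with these tests might look as follows (an illustrative sketch, not necessarily the original implementation):

class LinearSchedule:
    def __init__(self, initial, final, N, start=0):
        assert N > 0, 'N must be positive'
        assert start >= 0, 'start must be non-negative'
        self.initial = initial
        self.final = final
        self.N = N
        self.start = start

    def __call__(self, x):
        assert x >= 0, 'step index must be non-negative'
        if x < self.start:              # warmup: hold the initial value
            return self.initial
        if x >= self.start + self.N:    # finished annealing: hold the final value
            return self.final
        # linear interpolation between initial and final
        return self.initial + (self.final - self.initial)*(x - self.start)/self.N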