Пример #1
0
class WhiteningNormalizerProcessor(Processor):
    """Whitens observations online: zero mean, unit standard deviation.

    Whitening typically speeds up learning considerably when the input
    dimensions live on different scales. Caveat: the running statistics
    become part of the policy, so whoever saves the policy for later
    reuse must also persist these weights.
    """

    def __init__(self):
        # Created lazily on the first batch so shape/dtype can be inferred.
        self.normalizer = None

    def process_state_batch(self, batch):
        """Fold ``batch`` into the running statistics, return it whitened."""
        if self.normalizer is None:
            self.normalizer = WhiteningNormalizer(shape=batch.shape[1:],
                                                  dtype=batch.dtype)
        self.normalizer.update(batch)
        return self.normalizer.normalize(batch)

    def process_action(self, action):
        """Map raw network outputs to their environment ranges.

        ``action`` unpacks to (upper_action, delta_x, acc); the last two
        arrive normalized and are rescaled to [10, 60] and [-3, 3].
        """
        upper_action, raw_delta_x, raw_acc = action
        # raw_delta_x in [-1, 1] is mapped affinely onto [10, 60].
        delta_x = np.clip((raw_delta_x + 1) / 2 * 50 + 10, 10, 60)
        acc = np.clip(raw_acc * 3, -3, 3)
        return upper_action, delta_x, acc

    @staticmethod
    def process_reward_batch(batch):
        # Scale rewards down by 100 to keep learning targets small.
        return batch / 100
Пример #2
0
 def load_weights(self, filepath):
     """Load the upper-level model weights plus each sub-agent's weights
     and whitening statistics from files derived from ``filepath``."""
     # Upper-level model first, then hard-sync its target network.
     self.model.load_weights(filepath)
     self.update_target_model_hard()
     base, ext = os.path.splitext(filepath)
     # (agent, filename suffix, observation shape for a fresh normalizer)
     sub_agents = (
         (self.turn_left_agent, '_left_model', (10, 41)),
         (self.go_straight_agent, '_straight_model', (10, 59)),
         (self.turn_right_agent, '_right_model', (10, 41)),
     )
     for agent, suffix, _ in sub_agents:
         agent.load_weights(base + suffix + ext)
     # Ensure every processor owns a normalizer before loading its stats.
     if not self.processor.normalizer:
         self.processor.normalizer = WhiteningNormalizer(shape=(10, 56))
     for agent, _, shape in sub_agents:
         if not agent.processor.normalizer:
             agent.processor.normalizer = WhiteningNormalizer(shape=shape)
     # Restore the whitening statistics saved next to the weights.
     self.processor.normalizer.load_param(base + '.pickle')
     for agent, suffix, _ in sub_agents:
         agent.processor.normalizer.load_param(base + suffix + '.pickle')
Пример #3
0
def test_whitening_normalizer():
    """Online stats match batch stats; normalize/denormalize round-trips."""
    samples = np.random.normal(loc=.2, scale=2., size=(1000, 5))
    normalizer = WhiteningNormalizer(shape=(5,))
    # Two chunks exercise the incremental update path.
    for chunk in (samples[:500], samples[500:]):
        normalizer.update(chunk)

    assert_allclose(normalizer.mean, np.mean(samples, axis=0))
    assert_allclose(normalizer.std, np.std(samples, axis=0))

    whitened = normalizer.normalize(samples)
    assert_allclose(np.mean(whitened, axis=0),
                    np.zeros(5, dtype=normalizer.dtype), atol=1e-5)
    assert_allclose(np.std(whitened, axis=0),
                    np.ones(5, dtype=normalizer.dtype), atol=1e-5)

    assert_allclose(normalizer.denormalize(whitened), samples)
Пример #4
0
class WhiteningNormalizerProcessor(Processor):
    """Processor that whitens observations (zero mean, unit std).

    Helps learning substantially when feature scales differ. Note that
    the running statistics must be stored alongside the policy by the
    user if the policy is to be loaded again later.
    """

    def __init__(self):
        # Lazily instantiated from the first batch's shape and dtype.
        self.normalizer = None

    def process_state_batch(self, batch):
        """Update running statistics with ``batch`` and whiten it."""
        if self.normalizer is None:
            self.normalizer = WhiteningNormalizer(shape=batch.shape[1:], dtype=batch.dtype)
        self.normalizer.update(batch)
        return self.normalizer.normalize(batch)
Пример #5
0
 def load_processor(self, filepath='osim-rl/processor.npz'):
     """Restore the state processor's whitening statistics from an .npz file.

     A missing normalizer is created first, shaped after the environment's
     observation space with the dtype stored in the archive.

     # Arguments
         filepath: path to the ``.npz`` archive; defaults to the historical
             hard-coded location for backward compatibility.
     """
     # Context manager closes the underlying zip file handle — a bare
     # np.load on an .npz otherwise leaves it open.
     with np.load(filepath) as f:
         dtype = f['_sum'].dtype
         # 'is None', not '== None': identity check is the correct idiom.
         if self.processor.normalizer is None:
             self.processor.normalizer = WhiteningNormalizer(
                 shape=(1, ) + self.env.observation_space.shape, dtype=dtype)
         normalizer = self.processor.normalizer
         normalizer._sum = f['_sum']
         normalizer._count = int(f['_count'][0])
         normalizer._sumsq = f['_sumsq']
         normalizer.mean = f['mean']
         normalizer.std = f['std']
Пример #6
0
def test_whitening_normalizer():
    """Running stats track the data; whitening is invertible."""
    data = np.random.normal(loc=.2, scale=2., size=(1000, 5))
    norm = WhiteningNormalizer(shape=(5, ))
    # Feed the data in two halves to exercise the online update.
    norm.update(data[:500])
    norm.update(data[500:])

    assert_allclose(norm.mean, data.mean(axis=0))
    assert_allclose(norm.std, data.std(axis=0))

    whitened = norm.normalize(data)
    zeros = np.zeros(5, dtype=norm.dtype)
    ones = np.ones(5, dtype=norm.dtype)
    assert_allclose(whitened.mean(axis=0), zeros, atol=1e-5)
    assert_allclose(whitened.std(axis=0), ones, atol=1e-5)

    assert_allclose(norm.denormalize(whitened), data)
Пример #7
0
 def process_state_batch(self, batch):
     """Whiten a batch of states, lazily creating the normalizer from the
     first batch's shape and dtype."""
     normalizer = self.normalizer
     if normalizer is None:
         normalizer = WhiteningNormalizer(shape=batch.shape[1:],
                                          dtype=batch.dtype)
         self.normalizer = normalizer
     normalizer.update(batch)
     return normalizer.normalize(batch)