def test_make_fc(self):
    """Check that make_fc builds one linear layer per hidden size and rejects non-list sizes."""
    # A single hidden size yields a single layer.
    assert len(make_fc(3, [4])) == 1
    # Three hidden sizes yield three layers.
    assert len(make_fc(3, [4, 5, 6])) == 3
    # Hidden sizes must be a list, not a bare int.
    with pytest.raises(AssertionError):
        make_fc(3, 4)
def __init__(self, config, env, device, **kwargs):
    """Twin Q-networks (Q1/Q2), each a [256, 256] MLP over the concatenated
    flattened observation and action, with a scalar Q-value head.
    """
    super().__init__(**kwargs)
    self.config = config
    self.env = env
    self.device = device
    # Both critics take [obs; action] as input.
    in_dim = flatdim(env.observation_space) + flatdim(env.action_space)
    # Q1
    self.first_feature_layers = make_fc(in_dim, [256, 256])
    self.first_Q_head = nn.Linear(256, 1)
    # Q2
    self.second_feature_layers = make_fc(in_dim, [256, 256])
    self.second_Q_head = nn.Linear(256, 1)
    self.to(self.device)
def make_params(self, config):
    """Create the fully-connected feature layers and one LayerNorm per hidden layer."""
    hidden_sizes = config['network.hidden_sizes']
    self.feature_layers = make_fc(self.env_spec.observation_space.flat_dim, hidden_sizes)
    # One LayerNorm matching each hidden layer's width.
    self.layer_norms = nn.ModuleList([nn.LayerNorm(size) for size in hidden_sizes])
def __init__(self, config, env, device, **kwargs):
    """Feature extractor over the flattened observation, with per-layer LayerNorm."""
    super().__init__(**kwargs)
    self.config = config
    self.env = env
    self.device = device
    sizes = config['nn.sizes']
    self.feature_layers = make_fc(flatdim(env.observation_space), sizes)
    # One LayerNorm matching each hidden layer's width.
    self.layer_norms = nn.ModuleList([nn.LayerNorm(size) for size in sizes])
    self.to(self.device)
def make_params(self, config):
    """Create feature layers, per-layer LayerNorms, and a state-value output head."""
    hidden_sizes = config['network.hidden_sizes']
    self.feature_layers = make_fc(self.env_spec.observation_space.flat_dim, hidden_sizes)
    # One LayerNorm matching each hidden layer's width.
    self.layer_norms = nn.ModuleList([nn.LayerNorm(size) for size in hidden_sizes])
    # Scalar V(s) head on top of the last hidden layer.
    self.output_layer = StateValueHead(config, self.device, hidden_sizes[-1])
def __init__(self, config, env, device, **kwargs):
    """Single Q-network: a [400, 300] MLP over [obs; action] with a scalar head."""
    super().__init__(**kwargs)
    self.config = config
    self.env = env
    self.device = device
    in_dim = flatdim(env.observation_space) + flatdim(env.action_space)
    self.feature_layers = make_fc(in_dim, [400, 300])
    self.Q_head = nn.Linear(300, 1)
    self.to(self.device)
def __init__(self, config, env, device, **kwargs):
    """Gaussian policy network: [256, 256] features over the flattened
    observation, with separate mean and log-std heads sized to the action space.
    """
    super().__init__(**kwargs)
    self.config = config
    self.env = env
    self.device = device
    self.feature_layers = make_fc(flatdim(env.observation_space), [256, 256])
    self.mean_head = nn.Linear(256, flatdim(env.action_space))
    self.logstd_head = nn.Linear(256, flatdim(env.action_space))
    # Consistency with the sibling modules: use the stored attribute
    # (same value as the `device` argument at this point).
    self.to(self.device)
def __init__(self, config, env, device, **kwargs):
    """Feature extractor with orthogonally-initialized layers (tanh gain)."""
    super().__init__(**kwargs)
    self.config = config
    self.env = env
    self.device = device
    self.feature_layers = make_fc(flatdim(env.observation_space), config['nn.sizes'])
    # Orthogonal init scaled for tanh activations, zero bias.
    for fc in self.feature_layers:
        ortho_init(fc, nonlinearity='tanh', constant_bias=0.0)
    self.to(self.device)
def __init__(self, config, env, device, **kwargs):
    """Feature extractor with ReLU-scaled orthogonal init and per-layer LayerNorm."""
    super().__init__(**kwargs)
    self.config = config
    self.env = env
    self.device = device
    sizes = config['nn.sizes']
    self.feature_layers = make_fc(spaces.flatdim(env.observation_space), sizes)
    # Orthogonal init scaled for ReLU activations, zero bias.
    for fc in self.feature_layers:
        ortho_init(fc, nonlinearity='relu', constant_bias=0.0)
    # One LayerNorm matching each hidden layer's width.
    self.layer_norms = nn.ModuleList([nn.LayerNorm(size) for size in sizes])
    self.to(self.device)
def __init__(self, config, env, device, **kwargs):
    """Actor network: [256, 256] features over the flattened observation,
    topped by a tanh-squashed diagonal-Gaussian action head.
    """
    super().__init__(**kwargs)
    self.config = config
    self.env = env
    self.device = device
    self.feature_layers = make_fc(flatdim(env.observation_space), [256, 256])
    self.action_head = TanhDiagGaussianHead(256, flatdim(env.action_space), device, **kwargs)
    # Consistency with the sibling modules: use the stored attribute
    # (same value as the `device` argument at this point).
    self.to(self.device)
def __init__(self, config, device, **kwargs):
    """VAE for 784-dim (flattened 28x28) inputs: a 400-unit encoder producing
    mean/log-variance heads of size `nn.z_dim`, and a 400-unit decoder
    reconstructing the 784-dim input. All layers use orthogonal init.
    """
    super().__init__(**kwargs)
    self.config = config
    self.device = device
    # Encoder: 784 -> 400, ReLU-scaled orthogonal init.
    self.encoder = make_fc(784, [400])
    for layer in self.encoder:
        ortho_init(layer, nonlinearity='relu', constant_bias=0.0)
    # Latent heads use a small weight scale to start near-deterministic.
    self.mean_head = nn.Linear(400, config['nn.z_dim'])
    ortho_init(self.mean_head, weight_scale=0.01, constant_bias=0.0)
    self.logvar_head = nn.Linear(400, config['nn.z_dim'])
    ortho_init(self.logvar_head, weight_scale=0.01, constant_bias=0.0)
    # Decoder: z_dim -> 400, ReLU-scaled orthogonal init.
    self.decoder = make_fc(config['nn.z_dim'], [400])
    for layer in self.decoder:
        ortho_init(layer, nonlinearity='relu', constant_bias=0.0)
    # Reconstruction head, sigmoid-scaled init for Bernoulli pixel output.
    self.x_head = nn.Linear(400, 784)
    ortho_init(self.x_head, nonlinearity='sigmoid', constant_bias=0.0)
    # Consistency with the sibling modules: use the stored attribute
    # (same value as the `device` argument at this point).
    self.to(self.device)
    # Counts training iterations across epochs.
    self.total_iter = 0
def __init__(self, config, env, device, **kwargs):
    """Deterministic actor: [400, 300] MLP over the flattened observation with
    a linear action head; actions are later scaled by `max_action`.
    """
    super().__init__(**kwargs)
    self.config = config
    self.env = env
    self.device = device
    self.feature_layers = make_fc(flatdim(env.observation_space), [400, 300])
    self.action_head = nn.Linear(300, flatdim(env.action_space))
    # Require a uniform, symmetric action range [-a, a] so a single scalar
    # can rescale the policy output. The original only asserted that `high`
    # is uniform; a multi-valued `low` would then make `.item()` raise an
    # opaque ValueError, so assert `low` uniformity explicitly as well.
    assert np.unique(env.action_space.high).size == 1
    assert np.unique(env.action_space.low).size == 1
    assert -np.unique(env.action_space.low).item() == np.unique(env.action_space.high).item()
    self.max_action = env.action_space.high[0]
    self.to(self.device)
def __init__(self, config, env, device, **kwargs):
    """State-value network: configurable MLP features plus a scalar V(s) head."""
    super().__init__(**kwargs)
    self.config = config
    self.env = env
    self.device = device
    sizes = config['nn.sizes']
    self.feature_layers = make_fc(flatdim(env.observation_space), sizes)
    # Orthogonal init scaled for tanh activations, zero bias.
    for fc in self.feature_layers:
        ortho_init(fc, nonlinearity='tanh', constant_bias=0.0)
    # Value head on top of the last hidden layer, unit weight scale.
    self.V_head = nn.Linear(sizes[-1], 1)
    ortho_init(self.V_head, weight_scale=1.0, constant_bias=0.0)
    self.to(self.device)
def __init__(self, config, env, device, **kwargs):
    """Actor network: configurable MLP features with an action head chosen by
    action-space type — Categorical for Discrete, diagonal Gaussian for Box.

    Raises:
        TypeError: if the action space is neither Discrete nor Box.
    """
    super().__init__(**kwargs)
    self.config = config
    self.env = env
    self.device = device
    self.feature_layers = make_fc(spaces.flatdim(env.observation_space), config['nn.sizes'])
    # Orthogonal init scaled for tanh activations, zero bias.
    for layer in self.feature_layers:
        ortho_init(layer, nonlinearity='tanh', constant_bias=0.0)
    feature_dim = config['nn.sizes'][-1]
    if isinstance(env.action_space, spaces.Discrete):
        self.action_head = CategoricalHead(feature_dim, env.action_space.n, device, **kwargs)
    elif isinstance(env.action_space, spaces.Box):
        self.action_head = DiagGaussianHead(
            feature_dim, spaces.flatdim(env.action_space), device, config['agent.std0'], **kwargs)
    else:
        # Fail fast: the original left `action_head` undefined here, which
        # would only surface later as an AttributeError at call time.
        raise TypeError(f'unsupported action space: {type(env.action_space)}')
    self.to(self.device)
def make_params(self, config):
    """Create a [15, 15] feature MLP over scalar input plus mixture-density heads
    (mixing weights, means, log-variances), one output per mixture component.
    """
    self.feature_layers = make_fc(1, [15, 15])
    out_dim = config['num_density'] * 1  # one scalar target per component
    self.pi_head = nn.Linear(15, out_dim)
    self.mean_head = nn.Linear(15, out_dim)
    self.logvar_head = nn.Linear(15, out_dim)
def make_params(self, config):
    """Create the encoder: 784 -> 400 features with mean/log-variance latent heads."""
    self.feature_layers = make_fc(784, [400])
    z_dim = config['network.z_dim']
    self.mean_head = nn.Linear(400, z_dim)
    self.logvar_head = nn.Linear(400, z_dim)
def make_params(self, config):
    """Create the decoder: z_dim -> 400 features with a 784-dim reconstruction head."""
    self.feature_layers = make_fc(config['network.z_dim'], [400])
    self.x_head = nn.Linear(400, 784)