Example #1
0
    def forward(self, *inputs: Tensor, **kwargs) -> MultivariateNormal:
        """Run the recognition model on an (output, input) sequence pair.

        Expects ``inputs == (output_sequence, input_sequence)``; supports an
        optional ``num_particles`` keyword (default 1) that replicates the
        resulting distribution along a trailing particle axis.
        """
        num_particles = kwargs.get('num_particles', 1)
        output_sequence, input_sequence = inputs
        batch_size = output_sequence.shape[0]

        # Truncate both sequences to the model horizon and stack them
        # feature-wise before feeding the LSTM.
        joint_sequence = torch.cat(
            (output_sequence[:, :self.length, :],
             input_sequence[:, :self.length, :]),
            dim=-1)

        # Bidirectional LSTMs carry twice as many layer states.
        state_layers = self.lstm.num_layers * (1 + self.lstm.bidirectional)
        state_shape = (state_layers, batch_size, self.lstm.hidden_size)
        # NOTE(review): the initial hidden/cell state is drawn at random each
        # call, which makes forward() stochastic — presumably intentional for
        # this recognition model; confirm with the authors.
        initial_state = (torch.randn(*state_shape), torch.randn(*state_shape))

        lstm_out, _ = self.lstm(joint_sequence, initial_state)
        features = lstm_out[:, -1]  # last time step summarizes the sequence

        # Broadcast mean/variance over particles, then move the particle
        # axis to the back: (batch, dim_states, num_particles).
        loc = self.mean(features)
        loc = loc.expand(num_particles, batch_size, self.dim_states)
        loc = loc.permute(1, 2, 0)

        cov = safe_softplus(self.var(features))
        cov = cov.expand(num_particles, batch_size, self.dim_states)
        cov = cov.permute(1, 2, 0)

        return MultivariateNormal(loc, covariance_matrix=torch.diag_embed(cov))
Example #2
0
    def forward(self, *inputs: Tensor, **kwargs) -> MultivariateNormal:
        """Run the convolutional recognition model on a sequence pair.

        Expects ``inputs == (output_sequence, input_sequence)``; supports an
        optional ``num_particles`` keyword (default 1) that replicates the
        resulting distribution along a trailing particle axis.
        """
        num_particles = kwargs.get('num_particles', 1)
        output_sequence, input_sequence = inputs
        batch_size = output_sequence.shape[0]

        # Truncate both sequences to the model horizon and stack them
        # feature-wise before the convolutional stack.
        joint_sequence = torch.cat(
            (output_sequence[:, :self.length, :],
             input_sequence[:, :self.length, :]),
            dim=-1)

        # Two conv -> relu -> pool stages, then flatten per batch element.
        hidden = self.max_pool1(torch.relu(self.conv1(joint_sequence)))
        hidden = self.max_pool2(torch.relu(self.conv2(hidden)))  # type: ignore
        features = hidden.view(batch_size, -1)  # type: ignore

        # Broadcast mean/variance over particles, then move the particle
        # axis to the back: (batch, dim_states, num_particles).
        loc = self.mean(features)
        loc = loc.expand(num_particles, batch_size, self.dim_states)
        loc = loc.permute(1, 2, 0)

        cov = safe_softplus(self.var(features))
        cov = cov.expand(num_particles, batch_size, self.dim_states)
        cov = cov.permute(1, 2, 0)

        return MultivariateNormal(loc, covariance_matrix=torch.diag_embed(cov))
Example #3
0
def test_safety():
    """safe_softplus strictly dominates softplus, and equals it at eps=0."""
    sample = torch.rand(32, 4, 10)
    plain = torch.nn.functional.softplus(sample)
    assert (safe_softplus(sample) > plain).all()
    assert_allclose(safe_softplus(sample, 0), plain)
Example #4
0
def test_circle():
    """safe_softplus and inverse_softplus invert each other (small eps)."""
    values = torch.rand(32, 4, 10)
    round_trip_a = safe_softplus(inverse_softplus(values), 0)
    round_trip_b = inverse_softplus(safe_softplus(values, 1e-12))
    assert_allclose(values, round_trip_a)
    assert_allclose(values, round_trip_b)
Example #5
0
def test_shape():
    """Both transforms preserve the shape of their input tensor."""
    data = torch.rand(32, 4, 10)
    expected = torch.Size([32, 4, 10])
    assert safe_softplus(data).shape == expected
    assert inverse_softplus(data).shape == expected
Example #6
0
 def __str__(self) -> str:
     """Render the softplus-transformed variance bias as a string."""
     variance = safe_softplus(self.var.bias)
     return str(variance.detach().numpy())
Example #7
0
 def variance(self) -> torch.Tensor:
     """Diagonal covariance entries: safe-softplus of the raw parameter."""
     raw = self.variance_t
     return safe_softplus(raw)