def _create_maf_transform(self, context_features=None):
    """Build one masked affine autoregressive (MAF) transform.

    Args:
        context_features: Optional dimensionality of the conditioning
            context; ``None`` means an unconditional transform.

    Returns:
        A ``transforms.MaskedAffineAutoregressiveTransform`` configured
        from this object's ``dimensions``, ``maf_features`` and
        ``maf_hidden_blocks`` attributes.
    """
    maf_kwargs = dict(
        features=self.dimensions,
        hidden_features=self.maf_features,
        context_features=context_features,
        num_blocks=self.maf_hidden_blocks,
        use_residual_blocks=False,
        random_mask=False,
        activation=torch.nn.functional.relu,
        dropout_probability=0.0,
        use_batch_norm=False,
    )
    return transforms.MaskedAffineAutoregressiveTransform(**maf_kwargs)
# 示例 #2 (Example 2)
# 0
def make_flow(latent_dim, n_layers):
    """Construct a normalizing flow over a ``latent_dim``-dimensional space.

    The transform chain is a batch-norm transform followed by ``n_layers``
    pairs of (masked affine autoregressive transform, random permutation),
    composed on top of a standard-normal base distribution.

    Args:
        latent_dim: Dimensionality of the latent space.
        n_layers: Number of autoregressive layers.

    Returns:
        A ``flows.Flow`` combining the composite transform with the base
        distribution.
    """
    steps = [BatchNormTransform(latent_dim)]
    for _ in range(n_layers):
        steps.append(
            transforms.MaskedAffineAutoregressiveTransform(
                features=latent_dim,
                hidden_features=64,
            )
        )
        steps.append(transforms.RandomPermutation(latent_dim))

    composite = transforms.CompositeTransform(steps)

    # Standard-normal base distribution over the latent space.
    base = distributions.StandardNormal(shape=[latent_dim])

    return flows.Flow(transform=composite, distribution=base)
# 示例 #3 (Example 3)
# 0
def get_flow(
    model: str,
    dim_distribution: int,
    dim_context: Optional[int] = None,
    embedding: Optional[torch.nn.Module] = None,
    hidden_features: int = 50,
    made_num_mixture_components: int = 10,
    made_num_blocks: int = 4,
    flow_num_transforms: int = 5,
    mean=0.0,
    std=1.0,
) -> torch.nn.Module:
    """Density estimator

    Args:
        model: Model, one of made / maf / nsf / nsf_bounded
        dim_distribution: Dim of distribution
        dim_context: Dim of context
        embedding: Embedding network
        hidden_features: For all, number of hidden features
        made_num_mixture_components: For MADEs only, number of mixture components
        made_num_blocks: For MADEs only, number of blocks
        flow_num_transforms: For flows only, number of transforms
        mean: For normalization
        std: For normalization

    Returns:
        Neural network

    Raises:
        ValueError: If ``model`` is not one of the supported names.
    """
    # Affine map x -> (x - mean) / std, prepended to every learned transform
    # so the flow operates on standardized data.
    standardizing_transform = transforms.AffineTransform(shift=-mean / std,
                                                         scale=1 / std)

    features = dim_distribution
    context_features = dim_context

    if model == "made":
        # MADE mixture-of-Gaussians: all density modeling lives in the
        # distribution; the transform is standardization only.
        transform = standardizing_transform
        distribution = distributions_.MADEMoG(
            features=features,
            hidden_features=hidden_features,
            context_features=context_features,
            num_blocks=made_num_blocks,
            num_mixture_components=made_num_mixture_components,
            use_residual_blocks=True,
            random_mask=False,
            activation=torch.relu,
            dropout_probability=0.0,
            use_batch_norm=False,
            custom_initialization=True,
        )
        neural_net = flows.Flow(transform, distribution, embedding)

    elif model == "maf":
        # Stack of (MAF transform, random permutation) pairs; the permutation
        # varies which dimensions condition on which across layers.
        transform = transforms.CompositeTransform([
            transforms.CompositeTransform([
                transforms.MaskedAffineAutoregressiveTransform(
                    features=features,
                    hidden_features=hidden_features,
                    context_features=context_features,
                    num_blocks=2,
                    use_residual_blocks=False,
                    random_mask=False,
                    activation=torch.tanh,
                    dropout_probability=0.0,
                    use_batch_norm=True,
                ),
                transforms.RandomPermutation(features=features),
            ]) for _ in range(flow_num_transforms)
        ])

        transform = transforms.CompositeTransform(
            [standardizing_transform, transform])

        distribution = distributions_.StandardNormal((features, ))
        neural_net = flows.Flow(transform, distribution, embedding)

    elif model == "nsf":
        # Neural spline flow: alternating-mask rational-quadratic coupling
        # layers interleaved with LU-decomposed linear transforms.
        transform = transforms.CompositeTransform([
            transforms.CompositeTransform([
                transforms.PiecewiseRationalQuadraticCouplingTransform(
                    mask=create_alternating_binary_mask(features=features,
                                                        even=(i % 2 == 0)),
                    transform_net_create_fn=lambda in_features, out_features:
                    nets.ResidualNet(
                        in_features=in_features,
                        out_features=out_features,
                        hidden_features=hidden_features,
                        context_features=context_features,
                        num_blocks=2,
                        activation=torch.relu,
                        dropout_probability=0.0,
                        use_batch_norm=False,
                    ),
                    num_bins=10,
                    tails="linear",
                    tail_bound=3.0,
                    apply_unconditional_transform=False,
                ),
                transforms.LULinear(features, identity_init=True),
            ]) for i in range(flow_num_transforms)
        ])

        transform = transforms.CompositeTransform(
            [standardizing_transform, transform])

        distribution = distributions_.StandardNormal((features, ))
        neural_net = flows.Flow(transform, distribution, embedding)

    elif model == "nsf_bounded":
        # Like "nsf" but with random permutations instead of LU-linear layers
        # and a uniform base distribution on bounded support.
        transform = transforms.CompositeTransform([
            transforms.CompositeTransform([
                transforms.PiecewiseRationalQuadraticCouplingTransform(
                    mask=create_alternating_binary_mask(
                        features=dim_distribution, even=(i % 2 == 0)),
                    transform_net_create_fn=lambda in_features, out_features:
                    nets.ResidualNet(
                        in_features=in_features,
                        out_features=out_features,
                        hidden_features=hidden_features,
                        context_features=context_features,
                        num_blocks=2,
                        activation=F.relu,
                        dropout_probability=0.0,
                        use_batch_norm=False,
                    ),
                    num_bins=10,
                    tails="linear",
                    tail_bound=np.sqrt(
                        3),  # uniform with sqrt(3) bounds has unit-variance
                    apply_unconditional_transform=False,
                ),
                transforms.RandomPermutation(features=dim_distribution),
            ]) for i in range(flow_num_transforms)
        ])

        transform = transforms.CompositeTransform(
            [standardizing_transform, transform])

        distribution = StandardUniform(shape=(dim_distribution, ))
        neural_net = flows.Flow(transform, distribution, embedding)

    else:
        # Fail loudly with the offending value instead of a bare ValueError.
        raise ValueError(
            f"Unknown model '{model}'; expected one of "
            "'made', 'maf', 'nsf', 'nsf_bounded'.")

    return neural_net