Example #1
def posterior_nn(
    model: str,
    prior: torch.distributions.Distribution,
    context: torch.Tensor,
    embedding: Optional[torch.nn.Module] = None,
    hidden_features: int = 50,
    mdn_num_components: int = 20,
    made_num_mixture_components: int = 10,
    made_num_blocks: int = 4,
    flow_num_transforms: int = 5,
) -> torch.nn.Module:
    """Neural posterior density estimator

    Args:
        model: Model, one of maf / mdn / made / nsf
        prior: Prior distribution
        context: Observation
        embedding: Embedding network
        hidden_features: For all, number of hidden features
        mdn_num_components: For MDNs only, number of components
        made_num_mixture_components: For MADEs only, number of mixture components
        made_num_blocks: For MADEs only, number of blocks
        flow_num_transforms: For flows only, number of transforms

    Returns:
        Neural network
    """
    mean, std = (prior.mean, prior.stddev)
    standardizing_transform = transforms.AffineTransform(
        shift=-mean / std, scale=1 / std
    )

    parameter_dim = prior.sample([1]).shape[1]

    context = utils.torchutils.atleast_2d(context)
    observation_dim = context.shape[1]  # context is 2D after atleast_2d

    if model == "mdn":
        neural_net = MultivariateGaussianMDN(
            features=parameter_dim,
            context_features=observation_dim,
            hidden_features=hidden_features,
            hidden_net=nn.Sequential(
                nn.Linear(observation_dim, hidden_features),
                nn.ReLU(),
                nn.Dropout(p=0.0),
                nn.Linear(hidden_features, hidden_features),
                nn.ReLU(),
                nn.Linear(hidden_features, hidden_features),
                nn.ReLU(),
            ),
            num_components=mdn_num_components,
            custom_initialization=True,
        )

    elif model == "made":
        transform = standardizing_transform
        distribution = distributions_.MADEMoG(
            features=parameter_dim,
            hidden_features=hidden_features,
            context_features=observation_dim,
            num_blocks=made_num_blocks,
            num_mixture_components=made_num_mixture_components,
            use_residual_blocks=True,
            random_mask=False,
            activation=torch.relu,
            dropout_probability=0.0,
            use_batch_norm=False,
            custom_initialization=True,
        )
        neural_net = flows.Flow(transform, distribution, embedding)

    elif model == "maf":
        transform = transforms.CompositeTransform(
            [
                transforms.CompositeTransform(
                    [
                        transforms.MaskedAffineAutoregressiveTransform(
                            features=parameter_dim,
                            hidden_features=hidden_features,
                            context_features=observation_dim,
                            num_blocks=2,
                            use_residual_blocks=False,
                            random_mask=False,
                            activation=torch.tanh,
                            dropout_probability=0.0,
                            use_batch_norm=True,
                        ),
                        transforms.RandomPermutation(features=parameter_dim),
                    ]
                )
                for _ in range(flow_num_transforms)
            ]
        )

        transform = transforms.CompositeTransform([standardizing_transform, transform])

        distribution = distributions_.StandardNormal((parameter_dim,))
        neural_net = flows.Flow(transform, distribution, embedding)

    elif model == "nsf":
        transform = transforms.CompositeTransform(
            [
                transforms.CompositeTransform(
                    [
                        transforms.PiecewiseRationalQuadraticCouplingTransform(
                            mask=create_alternating_binary_mask(
                                features=parameter_dim, even=(i % 2 == 0)
                            ),
                            transform_net_create_fn=lambda in_features, out_features: nets.ResidualNet(
                                in_features=in_features,
                                out_features=out_features,
                                hidden_features=hidden_features,
                                context_features=observation_dim,
                                num_blocks=2,
                                activation=torch.relu,
                                dropout_probability=0.0,
                                use_batch_norm=False,
                            ),
                            num_bins=10,
                            tails="linear",
                            tail_bound=3.0,
                            apply_unconditional_transform=False,
                        ),
                        transforms.LULinear(parameter_dim, identity_init=True),
                    ]
                )
                for i in range(flow_num_transforms)
            ]
        )

        transform = transforms.CompositeTransform([standardizing_transform, transform])

        distribution = distributions_.StandardNormal((parameter_dim,))
        neural_net = flows.Flow(transform, distribution, embedding)

    else:
        raise ValueError(f"Unknown model '{model}'; expected one of maf / mdn / made / nsf.")

    return neural_net
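A minimal usage sketch for posterior_nn (the prior, observation shape, and variable names are illustrative assumptions, not part of the source; it relies on the snippet's torch and nflows-style imports):

import torch

# Hypothetical setup: 3-dim factorized Gaussian prior, 5-dim observation.
prior = torch.distributions.Normal(torch.zeros(3), torch.ones(3))
x_o = torch.randn(1, 5)

net = posterior_nn(model="maf", prior=prior, context=x_o)

# Evaluate the (untrained) density estimator at prior samples.
theta = prior.sample((10,))
log_prob = net.log_prob(theta, context=x_o.repeat(10, 1))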
Example #2
def build_nsf(
        batch_x: Tensor = None,
        batch_y: Tensor = None,
        z_score_x: bool = True,
        z_score_y: bool = True,
        hidden_features: int = 50,
        num_transforms: int = 5,
        embedding_net: nn.Module = nn.Identity(),
        **kwargs,
) -> nn.Module:
    """Builds NSF p(x|y).

    Args:
        batch_x: Batch of xs, used to infer dimensionality and (optional) z-scoring.
        batch_y: Batch of ys, used to infer dimensionality and (optional) z-scoring.
        z_score_x: Whether to z-score xs passing into the network.
        z_score_y: Whether to z-score ys passing into the network.
        hidden_features: Number of hidden features.
        num_transforms: Number of transforms.
        embedding_net: Optional embedding network for y.
        kwargs: Additional arguments that are passed by the build function but are not
            relevant for this density estimator and are therefore ignored.

    Returns:
        Neural network.
    """
    x_numel = batch_x[0].numel()
    # Infer the output dimensionality of the embedding_net by making a forward pass.
    y_numel = embedding_net(batch_y[:1]).numel()

    if x_numel == 1:
        raise NotImplementedError("1D x is not supported by this builder; see Example #5 for a 1D-capable variant.")

    transform = transforms.CompositeTransform([
        transforms.CompositeTransform([
            transforms.PiecewiseRationalQuadraticCouplingTransform(
                mask=create_alternating_binary_mask(features=x_numel,
                                                    even=(i % 2 == 0)),
                transform_net_create_fn=lambda in_features, out_features: nets.ResidualNet(
                    in_features=in_features,
                    out_features=out_features,
                    hidden_features=hidden_features,
                    context_features=y_numel,
                    num_blocks=2,
                    activation=relu,
                    dropout_probability=0.0,
                    use_batch_norm=False,
                ),
                num_bins=10,
                tails="linear",
                tail_bound=3.0,
                apply_unconditional_transform=False,
            ),
            transforms.LULinear(x_numel, identity_init=True),
        ]) for i in range(num_transforms)
    ])

    if z_score_x:
        transform_zx = standardizing_transform(batch_x)
        transform = transforms.CompositeTransform([transform_zx, transform])

    if z_score_y:
        embedding_net = nn.Sequential(standardizing_net(batch_y),
                                      embedding_net)

    distribution = distributions_.StandardNormal((x_numel, ))
    neural_net = flows.Flow(transform, distribution, embedding_net)

    return neural_net
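A usage sketch for build_nsf (data shapes and names are illustrative assumptions; the step shown is the usual negative log-likelihood objective on the returned nflows Flow):

import torch

# Hypothetical data: 100 pairs of 3-dim x and 2-dim y.
batch_x = torch.randn(100, 3)
batch_y = torch.randn(100, 2)

flow = build_nsf(batch_x=batch_x, batch_y=batch_y)

# One gradient step on the NLL of p(x|y).
loss = -flow.log_prob(batch_x, context=batch_y).mean()
loss.backward()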
Example #3
File: flow.py Project: bkmi/sbi
def neural_net_nsf(
    self,
    hidden_features,
    num_blocks,
    num_bins,
    xDim,
    thetaDim,
    batch_x=None,
    batch_theta=None,
    tail=3.,
    bounded=False,
    embedding_net=torch.nn.Identity()) -> torch.nn.Module:
    """Builds NSF p(x|y).

    Args:
        batch_x: Batch of xs, used to infer dimensionality and (optional) z-scoring.
        batch_y: Batch of ys, used to infer dimensionality and (optional) z-scoring.
        z_score_x: Whether to z-score xs passing into the network.
        z_score_y: Whether to z-score ys passing into the network.
        hidden_features: Number of hidden features.
        num_transforms: Number of transforms.
        embedding_net: Optional embedding network for y.
        kwargs: Additional arguments that are passed by the build function but are not
            relevant for maf and are therefore ignored.

    Returns:
        Neural network.
    """

    basic_transform = [
        transforms.CompositeTransform([
            transforms.PiecewiseRationalQuadraticCouplingTransform(
                mask=create_alternating_binary_mask(features=xDim,
                                                    even=(i % 2 == 0)).to(
                                                        self.args.device),
                transform_net_create_fn=lambda in_features, out_features: nets.ResidualNet(
                    in_features=in_features,
                    out_features=out_features,
                    hidden_features=hidden_features,
                    context_features=thetaDim,
                    num_blocks=2,
                    activation=torch.relu,
                    dropout_probability=0.,
                    use_batch_norm=False,
                ),
                num_bins=num_bins,
                tails='linear',
                tail_bound=tail,
                apply_unconditional_transform=False,
            ),
            transforms.RandomPermutation(features=xDim,
                                         device=self.args.device),
            transforms.LULinear(xDim, identity_init=True),
        ]) for i in range(num_blocks)
    ]

    transform = transforms.CompositeTransform(basic_transform).to(
        self.args.device)

    if batch_theta is not None:
        if bounded:
            transform_bounded = transforms.Logit(self.args.device)
            if self.sim.min[0].item() != 0 or self.sim.max[0].item() != 1:
                transform_affine = transforms.PointwiseAffineTransform(
                    shift=-self.sim.min / (self.sim.max - self.sim.min),
                    scale=1. / (self.sim.max - self.sim.min))
                transform = transforms.CompositeTransform(
                    [transform_affine, transform_bounded, transform])
            else:
                transform = transforms.CompositeTransform(
                    [transform_bounded, transform])
        else:
            transform_zx = standardizing_transform(batch_x)
            transform = transforms.CompositeTransform(
                [transform_zx, transform])
        embedding_net = torch.nn.Sequential(standardizing_net(batch_theta),
                                            embedding_net)
        distribution = distributions_.StandardNormal((xDim, ),
                                                     self.args.device)
        neural_net = flows.Flow(self,
                                transform,
                                distribution,
                                embedding_net=embedding_net).to(
                                    self.args.device)
    else:
        distribution = distributions_.StandardNormal((xDim, ),
                                                     self.args.device)
        neural_net = flows.Flow(self, transform,
                                distribution).to(self.args.device)

    return neural_net
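The bounded branch above first maps x affinely onto [0, 1] and then applies a logit, so the spline flow operates on an unbounded space. A standalone sketch of just that composition, using stock nflows transforms and assumed per-dimension bounds (low and high are illustrative):

import torch
from nflows import transforms

# Assumed bounds of a 2-dim x.
low = torch.tensor([0.0, -1.0])
high = torch.tensor([2.0, 1.0])

# [low, high] -> [0, 1], then [0, 1] -> R via the logit.
to_unit = transforms.PointwiseAffineTransform(
    shift=-low / (high - low), scale=1.0 / (high - low))
unbound = transforms.CompositeTransform([to_unit, transforms.Logit()])

x = low + torch.rand(5, 2) * (high - low)  # samples inside the bounds
z, logabsdet = unbound(x)  # z is unconstrained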
Example #5
def build_nsf(
        batch_x: Tensor = None,
        batch_y: Tensor = None,
        z_score_x: bool = True,
        z_score_y: bool = True,
        hidden_features: int = 50,
        num_transforms: int = 5,
        num_bins: int = 10,
        embedding_net: nn.Module = nn.Identity(),
        **kwargs,
) -> nn.Module:
    """Builds NSF p(x|y).

    Args:
        batch_x: Batch of xs, used to infer dimensionality and (optional) z-scoring.
        batch_y: Batch of ys, used to infer dimensionality and (optional) z-scoring.
        z_score_x: Whether to z-score xs passing into the network.
        z_score_y: Whether to z-score ys passing into the network.
        hidden_features: Number of hidden features.
        num_transforms: Number of transforms.
        num_bins: Number of bins used for the splines.
        embedding_net: Optional embedding network for y.
        kwargs: Additional arguments that are passed by the build function but are not
            relevant for this density estimator and are therefore ignored.

    Returns:
        Neural network.
    """
    x_numel = batch_x[0].numel()
    # Infer the output dimensionality of the embedding_net by making a forward pass.
    y_numel = embedding_net(batch_y[:1]).numel()

    if x_numel == 1:

        class ContextSplineMap(nn.Module):
            """
            Neural network from `context` to the spline parameters.

            We cannot use the resnet as conditioner to learn each dimension conditioned
            on the other dimensions (because there is only one). Instead, we learn the
            spline parameters directly. In the case of conditional density estimation,
            we make the spline parameters conditional on the context. This is
            implemented in this class.
            """
            def __init__(
                self,
                in_features: int,
                out_features: int,
                hidden_features: int,
                context_features: int,
            ):
                """
                Initialize neural network that learns to predict spline parameters.

                Args:
                    in_features: Unused since there is no `conditioner` in 1D.
                    out_features: Number of spline parameters.
                    hidden_features: Number of hidden units.
                    context_features: Number of context features.
                """
                super().__init__()
                # `self.hidden_features` is only defined such that nflows can infer
                # a scaling factor for initializations.
                self.hidden_features = hidden_features

                # Use a non-linearity because otherwise, there will be a linear
                # mapping from context features onto distribution parameters.
                self.spline_predictor = nn.Sequential(
                    nn.Linear(context_features, self.hidden_features),
                    nn.ReLU(),
                    nn.Linear(self.hidden_features, self.hidden_features),
                    nn.ReLU(),
                    nn.Linear(self.hidden_features, out_features),
                )

            def __call__(self, inputs: Tensor, context: Tensor, *args,
                         **kwargs) -> Tensor:
                """
                Return parameters of the spline given the context.

                Args:
                    inputs: Unused. It would usually be the other dimensions, but in
                        1D, there are no other dimensions.
                    context: Context features.

                Returns:
                    Spline parameters.
                """
                return self.spline_predictor(context)

        mask_in_layer = lambda i: tensor([1], dtype=uint8)
        conditioner = lambda in_features, out_features: ContextSplineMap(
            in_features,
            out_features,
            hidden_features,
            context_features=y_numel)
        if num_transforms > 1:
            warn(
                f"You are using `num_transforms={num_transforms}`. When estimating a "
                f"1D density, you will not get any performance increase by using "
                f"multiple transforms with NSF. We recommend setting "
                f"`num_transforms=1` for faster training (see also 'Change "
                f"hyperparameters of density esitmators' here: "
                f"https://www.mackelab.org/sbi/tutorial/04_density_estimators/)."
            )

    else:
        mask_in_layer = lambda i: create_alternating_binary_mask(
            features=x_numel, even=(i % 2 == 0))
        conditioner = lambda in_features, out_features: nets.ResidualNet(
            in_features=in_features,
            out_features=out_features,
            hidden_features=hidden_features,
            context_features=y_numel,
            num_blocks=2,
            activation=relu,
            dropout_probability=0.0,
            use_batch_norm=False,
        )

    transform = transforms.CompositeTransform([
        transforms.CompositeTransform([
            transforms.PiecewiseRationalQuadraticCouplingTransform(
                mask=mask_in_layer(i),
                transform_net_create_fn=conditioner,
                num_bins=num_bins,
                tails="linear",
                tail_bound=3.0,
                apply_unconditional_transform=False,
            ),
            transforms.LULinear(x_numel, identity_init=True),
        ]) for i in range(num_transforms)
    ])

    if z_score_x:
        transform_zx = standardizing_transform(batch_x)
        transform = transforms.CompositeTransform([transform_zx, transform])

    if z_score_y:
        embedding_net = nn.Sequential(standardizing_net(batch_y),
                                      embedding_net)

    distribution = distributions_.StandardNormal((x_numel, ))
    neural_net = flows.Flow(transform, distribution, embedding_net)

    return neural_net
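A usage sketch for the 1D branch of this builder (data shapes are illustrative assumptions; num_transforms=1 follows the snippet's own warning for 1D targets):

import torch

# Hypothetical data: a 1-dim x conditioned on a 4-dim y.
batch_x = torch.randn(100, 1)
batch_y = torch.randn(100, 4)

flow = build_nsf(batch_x=batch_x, batch_y=batch_y, num_transforms=1)

# Draw 20 samples of x for a single conditioning value.
samples = flow.sample(20, context=batch_y[:1])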
Example #6
def get_flow(
    model: str,
    dim_distribution: int,
    dim_context: Optional[int] = None,
    embedding: Optional[torch.nn.Module] = None,
    hidden_features: int = 50,
    made_num_mixture_components: int = 10,
    made_num_blocks: int = 4,
    flow_num_transforms: int = 5,
    mean=0.0,
    std=1.0,
) -> torch.nn.Module:
    """Density estimator

    Args:
        model: Model, one of maf / made / nsf / nsf_bounded
        dim_distribution: Dim of distribution
        dim_context: Dim of context
        embedding: Embedding network
        hidden_features: For all, number of hidden features
        made_num_mixture_components: For MADEs only, number of mixture components
        made_num_blocks: For MADEs only, number of blocks
        flow_num_transforms: For flows only, number of transforms
        mean: For normalization
        std: For normalization

    Returns:
        Neural network
    """
    standardizing_transform = transforms.AffineTransform(shift=-mean / std,
                                                         scale=1 / std)

    features = dim_distribution
    context_features = dim_context

    if model == "made":
        transform = standardizing_transform
        distribution = distributions_.MADEMoG(
            features=features,
            hidden_features=hidden_features,
            context_features=context_features,
            num_blocks=made_num_blocks,
            num_mixture_components=made_num_mixture_components,
            use_residual_blocks=True,
            random_mask=False,
            activation=torch.relu,
            dropout_probability=0.0,
            use_batch_norm=False,
            custom_initialization=True,
        )
        neural_net = flows.Flow(transform, distribution, embedding)

    elif model == "maf":
        transform = transforms.CompositeTransform([
            transforms.CompositeTransform([
                transforms.MaskedAffineAutoregressiveTransform(
                    features=features,
                    hidden_features=hidden_features,
                    context_features=context_features,
                    num_blocks=2,
                    use_residual_blocks=False,
                    random_mask=False,
                    activation=torch.tanh,
                    dropout_probability=0.0,
                    use_batch_norm=True,
                ),
                transforms.RandomPermutation(features=features),
            ]) for _ in range(flow_num_transforms)
        ])

        transform = transforms.CompositeTransform(
            [standardizing_transform, transform])

        distribution = distributions_.StandardNormal((features, ))
        neural_net = flows.Flow(transform, distribution, embedding)

    elif model == "nsf":
        transform = transforms.CompositeTransform([
            transforms.CompositeTransform([
                transforms.PiecewiseRationalQuadraticCouplingTransform(
                    mask=create_alternating_binary_mask(features=features,
                                                        even=(i % 2 == 0)),
                    transform_net_create_fn=lambda in_features, out_features:
                    nets.ResidualNet(
                        in_features=in_features,
                        out_features=out_features,
                        hidden_features=hidden_features,
                        context_features=context_features,
                        num_blocks=2,
                        activation=torch.relu,
                        dropout_probability=0.0,
                        use_batch_norm=False,
                    ),
                    num_bins=10,
                    tails="linear",
                    tail_bound=3.0,
                    apply_unconditional_transform=False,
                ),
                transforms.LULinear(features, identity_init=True),
            ]) for i in range(flow_num_transforms)
        ])

        transform = transforms.CompositeTransform(
            [standardizing_transform, transform])

        distribution = distributions_.StandardNormal((features, ))
        neural_net = flows.Flow(transform, distribution, embedding)

    elif model == "nsf_bounded":

        transform = transforms.CompositeTransform([
            transforms.CompositeTransform([
                transforms.PiecewiseRationalQuadraticCouplingTransform(
                    mask=create_alternating_binary_mask(
                        features=dim_distribution, even=(i % 2 == 0)),
                    transform_net_create_fn=lambda in_features, out_features:
                    nets.ResidualNet(
                        in_features=in_features,
                        out_features=out_features,
                        hidden_features=hidden_features,
                        context_features=context_features,
                        num_blocks=2,
                        activation=F.relu,
                        dropout_probability=0.0,
                        use_batch_norm=False,
                    ),
                    num_bins=10,
                    tails="linear",
                    tail_bound=np.sqrt(3),  # a uniform on [-sqrt(3), sqrt(3)] has unit variance
                    apply_unconditional_transform=False,
                ),
                transforms.RandomPermutation(features=dim_distribution),
            ]) for i in range(flow_num_transforms)
        ])

        transform = transforms.CompositeTransform(
            [standardizing_transform, transform])

        distribution = StandardUniform(shape=(dim_distribution, ))
        neural_net = flows.Flow(transform, distribution, embedding)

    else:
        raise ValueError(f"Unknown model '{model}'; expected one of maf / made / nsf / nsf_bounded.")

    return neural_net
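A usage sketch for get_flow (dimensions and names are illustrative assumptions):

import torch

# Hypothetical 2-dim distribution conditioned on a 3-dim context.
net = get_flow(model="nsf", dim_distribution=2, dim_context=3)

theta = torch.randn(8, 2)
context = torch.randn(8, 3)
nll = -net.log_prob(theta, context=context).mean()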