import torch.nn as nn

# ActNormFlow, ExtActNormFlow, CouplingLayer, InvertibleConv, LinearNet,
# SimpleLinearLayer and get_param_val are assumed to be provided elsewhere in this repository.


def _create_block(flow_index, model_func, block_type):
    # For variational dequantization we apply a combination of activation normalization and coupling layers.
    # Invertible convolutions are not useful here as our dimensionality is 1 anyway.
    mask = CouplingLayer.create_chess_mask()
    # Flip the mask every other block so that all dimensions get transformed over the stack.
    if flow_index % 2 == 0:
        mask = 1 - mask
    return [
        ActNormFlow(c_in=1, data_init=False),
        CouplingLayer(c_in=1,
                      mask=mask,
                      model_func=model_func,
                      block_type=block_type)
    ]
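# A minimal sketch of how _create_block could be stacked into a full dequantization
# flow. This helper is not part of the original file; `build_dequant_flows` and
# `num_blocks` are illustrative names, and model_func/block_type are whatever
# coupling network constructor and block identifier the surrounding code supplies.
def build_dequant_flows(num_blocks, model_func, block_type):
    layers = []
    for flow_index in range(num_blocks):
        layers += _create_block(flow_index, model_func, block_type)
    return nn.ModuleList(layers)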
def _create_flows(num_dims, embed_dims, config):
    num_flows = get_param_val(config, "num_flows", 0)
    num_hidden_layers = get_param_val(config, "hidden_layers", 2)
    hidden_size = get_param_val(config, "hidden_size", 256)

    # We apply a linear net in the coupling layers for linear flows
    block_type_name = "LinearNet"
    block_fun_coup = lambda c_out: LinearNet(c_in=num_dims,
                                             c_out=c_out,
                                             num_layers=num_hidden_layers,
                                             hidden_size=hidden_size,
                                             ext_input_dims=embed_dims)

    # For the activation normalization, we map an embedding to scaling and bias with a single layer
    block_fun_actn = lambda: SimpleLinearLayer(
        c_in=embed_dims, c_out=2 * num_dims, data_init=True)

    permut_layer = lambda flow_index: InvertibleConv(c_in=num_dims)
    actnorm_layer = lambda flow_index: ExtActNormFlow(c_in=num_dims,
                                                      net=block_fun_actn())
    # We do not use mixture coupling layers here as we need the inverse to be differentiable as well
    coupling_layer = lambda flow_index: CouplingLayer(
        c_in=num_dims,
        mask=CouplingLayer.create_channel_mask(c_in=num_dims),
        block_type=block_type_name,
        model_func=block_fun_coup)

    flow_layers = []
    if num_flows == 0 or num_dims == 1:  # num_flows == 0 => mixture model, num_dims == 1 => coupling layers have no effect
        flow_layers += [actnorm_layer(flow_index=0)]
    else:
        for flow_index in range(num_flows):
            flow_layers += [
                actnorm_layer(flow_index),
                permut_layer(flow_index),
                coupling_layer(flow_index)
            ]

    return nn.ModuleList(flow_layers)
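

# A small usage sketch (not from the original file): the config keys mirror the
# get_param_val lookups above, and the num_dims/embed_dims values are arbitrary
# illustration values rather than defaults from this repository.
if __name__ == "__main__":
    example_config = {"num_flows": 4, "hidden_layers": 2, "hidden_size": 256}
    example_flows = _create_flows(num_dims=16, embed_dims=32, config=example_config)
    # Expect an nn.ModuleList of [ExtActNormFlow, InvertibleConv, CouplingLayer] repeated num_flows times.
    print(len(example_flows))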