Exemple #1
0
    def __init__(self,
                 n_inputs,
                 n_hiddens,
                 activation='relu',
                 input_order='sequential',
                 mode='sequential'):
        """
        Constructor.

        :param n_inputs: number of inputs
        :param n_hiddens: list with number of hidden units for each hidden layer
        :param activation: name of the activation function, resolved via get_activation_function
        :param input_order: order of inputs
        :param mode: strategy for assigning degrees to hidden nodes: can be 'random' or 'sequential'
        """
        super(GaussianMADE, self).__init__(n_inputs)

        # save input arguments
        self.activation = activation
        self.n_inputs = n_inputs
        self.n_hiddens = n_hiddens
        self.mode = mode

        # create network's parameters: per-unit degrees, the autoregressive
        # masks derived from them, and the weight/bias tensors for the hidden
        # layers and the two output heads (m and logp)
        self.degrees = create_degrees(n_inputs, n_hiddens, input_order, mode)
        self.Ms, self.Mmp = create_masks(self.degrees)
        self.Ws, self.bs, self.Wm, self.bm, self.Wp, self.bp = create_weights(
            n_inputs, n_hiddens, None)
        self.input_order = self.degrees[0]  # effective input order used by the masks

        self.activation_function = get_activation_function(activation)

        # Output info: left as None until a forward pass fills them in
        self.m = None
        self.logp = None

        # Dtype and GPU / CPU management
        # NOTE(review): presumably caches arguments of .to(...) calls so they
        # can be re-applied elsewhere — confirm against the rest of the class
        self.to_args = None
        self.to_kwargs = None
Exemple #2
0
    def __init__(self,
                 n_latent,
                 n_parameters,
                 n_hidden,
                 activation='tanh',
                 delta_z_model=True):
        """
        Constructor.

        :param n_latent: number of latent variables
        :param n_parameters: number of parameters (also the output width)
        :param n_hidden: iterable with the width of each hidden layer
        :param activation: name of the activation function, resolved via get_activation_function
        :param delta_z_model: if True the input holds one latent vector plus the
            parameters; if False, two latent vectors plus the parameters
        """

        super(CheckpointScoreEstimator, self).__init__()

        # Store configuration
        self.n_latent = n_latent
        self.n_parameters = n_parameters
        self.n_hidden = n_hidden
        self.activation = get_activation_function(activation)
        self.delta_z_model = delta_z_model

        # Input width: a single latent vector in delta-z mode, two otherwise,
        # always concatenated with the parameters
        in_features = (n_latent if delta_z_model else 2 * n_latent) + n_parameters

        # Fully connected stack: hidden layers followed by the log r output
        # layer with one unit per parameter
        self.layers = nn.ModuleList()
        for out_features in n_hidden:
            self.layers.append(nn.Linear(in_features, out_features))
            in_features = out_features
        self.layers.append(nn.Linear(in_features, n_parameters))
Exemple #3
0
    def __init__(self,
                 n_inputs,
                 n_hiddens,
                 activation='relu',
                 input_order='sequential',
                 mode='sequential'):
        """
        Constructor.
        :param n_inputs: number of inputs
        :param n_hiddens: list with number of hidden units for each hidden layer
        :param activation: name of the activation function, resolved via get_activation_function
        :param input_order: order of inputs
        :param mode: strategy for assigning degrees to hidden nodes: can be 'random' or 'sequential'
        """

        super(GaussianMADE, self).__init__()

        # save input arguments
        self.activation = activation
        self.n_inputs = n_inputs
        self.n_hiddens = n_hiddens
        self.mode = mode

        # create network's parameters: per-unit degrees, the autoregressive
        # masks derived from them, and the weight/bias tensors for the hidden
        # layers and the two output heads (m and logp)
        self.degrees = create_degrees(n_inputs, n_hiddens, input_order, mode)
        self.Ms, self.Mmp = create_masks(self.degrees)
        self.Ws, self.bs, self.Wm, self.bm, self.Wp, self.bp = create_weights(
            n_inputs, n_hiddens, None)
        self.input_order = self.degrees[0]  # effective input order used by the masks

        self.activation_function = get_activation_function(activation)

        # Output info: left as None until a forward pass fills them in
        self.m = None
        self.logp = None
        self.log_likelihood = None
Exemple #4
0
    def __init__(self,
                 n_conditionals,
                 n_inputs,
                 n_hiddens,
                 n_components=10,
                 activation='relu',
                 input_order='sequential',
                 mode='sequential'):
        """
        Constructor.

        :param n_conditionals: number of conditional inputs
        :param n_inputs: number of inputs
        :param n_hiddens: list with number of hidden units for each hidden layer
        :param n_components: number of mixture components
        :param activation: name of the activation function, resolved via get_activation_function
        :param input_order: order of inputs
        :param mode: strategy for assigning degrees to hidden nodes: can be 'random' or 'sequential'
        """
        super(ConditionalMixtureMADE, self).__init__(n_conditionals, n_inputs)

        # save input arguments
        self.activation = activation
        self.n_conditionals = n_conditionals
        self.n_inputs = n_inputs
        self.n_hiddens = n_hiddens
        self.mode = mode
        self.n_components = n_components

        # create network's parameters: per-unit degrees, the autoregressive
        # masks, and the conditional weight/bias tensors for the hidden layers
        # and the three mixture output heads (means, log precisions, weights)
        self.degrees = create_degrees(n_inputs, n_hiddens, input_order, mode)
        self.Ms, self.Mmp = create_masks(self.degrees)
        logging.debug('Mmp shape: %s', self.Mmp.shape)
        (self.Wx, self.Ws, self.bs, self.Wm, self.bm, self.Wp, self.bp,
         self.Wa,
         self.ba) = create_weights_conditional(n_conditionals, n_inputs,
                                               n_hiddens, n_components)
        self.input_order = self.degrees[0]  # effective input order used by the masks

        # Shaping things: add trailing / leading singleton dimensions so the
        # mask and biases broadcast over the mixture-component dimension
        self.Mmp = self.Mmp.unsqueeze(2)
        self.ba.data = self.ba.data.unsqueeze(0)
        self.bp.data = self.bp.data.unsqueeze(0)
        self.bm.data = self.bm.data.unsqueeze(0)

        self.activation_function = get_activation_function(activation)

        # Output info. TODO: make these not properties of self
        # (left as None until a forward pass fills them in)
        self.m = None
        self.logp = None
        self.loga = None

        # Dtype and GPU / CPU management
        # NOTE(review): presumably caches arguments of .to(...) calls so they
        # can be re-applied elsewhere — confirm against the rest of the class
        self.to_args = None
        self.to_kwargs = None
Exemple #5
0
    def __init__(self,
                 n_observables,
                 n_parameters,
                 n_hidden,
                 activation='tanh'):
        """
        Constructor.

        :param n_observables: number of observable inputs
        :param n_parameters: number of parameter inputs
        :param n_hidden: iterable with the width of each hidden layer
        :param activation: name of the activation function, resolved via get_activation_function
        """

        super(ParameterizedRatioEstimator, self).__init__()

        # Store configuration
        self.n_hidden = n_hidden
        self.activation = get_activation_function(activation)

        # Fully connected stack on the concatenated (observables, parameters)
        # input, ending in the single-unit log r output layer
        self.layers = nn.ModuleList()
        width = n_observables + n_parameters
        for hidden_width in n_hidden:
            self.layers.append(nn.Linear(width, hidden_width))
            width = hidden_width
        self.layers.append(nn.Linear(width, 1))