Example #1: Building a deep belief network by greedily stacking RBMs.
import tensorflow as tf


class DBN:  # class name assumed here; the name scopes below use 'dbn_{}'
    def __init__(self, layer_sizes, input_tensor, learning_rate=1e-3, hidden_tensor=None,
                 n_fantasy_states=100, persistent_contrastive_divergence=True,
                 contrastive_divergence_level=1):
        self.layer_sizes = layer_sizes
        self.machines = []

        current_input = input_tensor
        current_input_size = int(input_tensor.get_shape()[-1])
        # RBM constructor signature, for reference:
        #   RBM(n_features, n_hidden, visible_placeholder=None, contrastive_divergence_level=1,
        #       learning_rate=1e-3, persistent_contrastive_divergence=True, n_fantasy_states=100, name='rbm')
        for i, size in enumerate(layer_sizes):
            rbm = RBM(current_input_size, size, current_input, learning_rate=learning_rate,
                      n_fantasy_states=n_fantasy_states, name='dbn_{}'.format(i),
                      persistent_contrastive_divergence=persistent_contrastive_divergence,
                      contrastive_divergence_level=contrastive_divergence_level)
            # Each layer's hidden activations become the visible input of the next layer.
            current_input_size = size
            current_input = rbm.hidden_state_plus
            self.machines.append(rbm)

        if hidden_tensor is None:
            self.hidden_tensor = tf.placeholder(tf.float32, shape=[None, layer_sizes[-1]])
        else:
            self.hidden_tensor = hidden_tensor
        current_hidden = self.hidden_tensor
        # Wire the generative (top-down) path: starting from the top hidden layer,
        # sample each layer's visible units from the layer above.
        for rbm in reversed(self.machines):
            with tf.name_scope(rbm.name):
                visible_prob = tf.nn.sigmoid(tf.matmul(current_hidden, rbm.w, transpose_b=True) + rbm.b_v)
                rbm.visible_from_hidden = RBM._sample_from(visible_prob, 'visible_from_hidden')
                current_hidden = rbm.visible_from_hidden
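
A minimal usage sketch, assuming the RBM class referenced above is importable and the usual TF1-style graph/session workflow; the input width (784) and the layer sizes are illustrative placeholders, not values from the example:

# Usage sketch (assumptions: RBM is available; shapes are illustrative).
visible = tf.placeholder(tf.float32, shape=[None, 784])  # e.g. flattened 28x28 inputs
dbn = DBN(layer_sizes=[512, 256, 64], input_tensor=visible)

# Tensors exposed by the construction above:
top_hidden = dbn.machines[-1].hidden_state_plus          # recognition output of the top RBM
generated_visible = dbn.machines[0].visible_from_hidden  # sample produced by the top-down pass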