Example #1
    def __init__(self, input_layer, output_nodes,
                 session=None,
                 bias=None,
                 weights=None,
                 back_bias=None,
                 bactivate=False,
                 freeze=False,
                 non_liniarity=tf.nn.sigmoid,
                 weight_extender_func=noise_weight_extender,
                 unsupervised_cost=1.,
                 supervised_cost=1.,
                 noise_std=None,
                 bactivation_loss_func=squared_loss,
                 name='Layer'):
        super(Layer, self).__init__(input_layer,
                                    output_nodes,
                                    session=session,
                                    weight_extender_func=weight_extender_func,
                                    freeze=freeze,
                                    name=name)
        self._non_liniarity = non_liniarity
        self._bactivate = bactivate
        self._unsupervised_cost = unsupervised_cost
        self._supervised_cost = supervised_cost
        self._noise_std = noise_std
        self._bactivation_loss_func = bactivation_loss_func

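        # Forward weights and bias: Xavier-initialised and zero-initialised
        # respectively, unless explicit values were passed in.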
        self._weights = self._create_variable("weights",
                                              (BaseLayer.INPUT_BOUND_VALUE, BaseLayer.OUTPUT_BOUND_VALUE),
                                              weights if weights is not None else xavier_init(self._input_nodes,
                                                                                              self._output_nodes))

        self._bias = self._create_variable("bias",
                                           (BaseLayer.OUTPUT_BOUND_VALUE,),
                                           bias if bias is not None else tf.zeros((self._output_nodes,)))

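        # The back bias is only created when the layer bactivates; otherwise it
        # is left as None.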
        if self.bactivate:
            self._back_bias = self._create_variable("back_bias",
                                                    (BaseLayer.INPUT_BOUND_VALUE,),
                                                    back_bias if back_bias is not None else tf.zeros(
                                                        (self._input_nodes,)))
        else:
            self._back_bias = None

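        # When noise_std is set, Gaussian noise is added to the incoming
        # training activation (denoising-style corruption).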
        if self._noise_std is not None:
            self._activation_corrupted = self.input_layer.activation_train + tf.random_normal(
                tf.shape(self.input_layer.activation_train),
                stddev=self._noise_std)
        else:
            self._activation_corrupted = self.input_layer.activation_train
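
Example #1 falls back to xavier_init for the forward weights when none are passed in. That helper is not part of the excerpt; as a point of reference, a common Glorot/Xavier uniform initialiser (an assumption about its behaviour, not the project's actual code) looks roughly like this:

    import numpy as np
    import tensorflow as tf

    def xavier_init(fan_in, fan_out, const=1.0):
        # Uniform range scaled by fan-in and fan-out so that activation variance
        # stays roughly constant from layer to layer (Glorot & Bengio, 2010).
        bound = const * np.sqrt(6.0 / (fan_in + fan_out))
        return tf.random_uniform((fan_in, fan_out),
                                 minval=-bound, maxval=bound,
                                 dtype=tf.float32)
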
Example #2
    def __init__(self,
                 input_layer,
                 output_nodes,
                 session=None,
                 bias=None,
                 weights=None,
                 back_weights=None,
                 back_bias=None,
                 freeze=False,
                 non_liniarity=tf.nn.relu,
                 bactivation_loss_func=squared_loss,
                 weight_extender_func=noise_weight_extender,
                 unsupervised_cost=1.,
                 supervised_cost=1.,
                 noise_std=None,
                 name='BackWeightLayer'):
        super(BackWeightLayer,
              self).__init__(input_layer,
                             output_nodes,
                             session=session,
                             bias=bias,
                             weights=weights,
                             back_bias=back_bias,
                             bactivate=True,
                             freeze=freeze,
                             non_liniarity=non_liniarity,
                             weight_extender_func=weight_extender_func,
                             bactivation_loss_func=bactivation_loss_func,
                             unsupervised_cost=unsupervised_cost,
                             supervised_cost=supervised_cost,
                             noise_std=noise_std,
                             name=name)
        self._back_weights = self._create_variable(
            "back_weights",
            (BaseLayer.OUTPUT_BOUND_VALUE, BaseLayer.INPUT_BOUND_VALUE),
            back_weights if back_weights is not None else xavier_init(
                self._output_nodes, self._input_nodes))
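
The only addition over the plain Layer above is the back_weights variable, shaped (output nodes, input nodes), i.e. pointing back from this layer's output towards its input. How it is used is not shown in this excerpt; a minimal standalone sketch of untied forward/backward weights (illustrative names and shapes, not the project's API):

    import tensorflow as tf

    x = tf.placeholder(tf.float32, (None, 784))
    w = tf.Variable(tf.random_normal((784, 256), stddev=0.05))        # forward weights
    b = tf.Variable(tf.zeros((256,)))
    back_w = tf.Variable(tf.random_normal((256, 784), stddev=0.05))   # untied back weights
    back_b = tf.Variable(tf.zeros((784,)))

    activation = tf.nn.relu(tf.matmul(x, w) + b)
    bactivation = tf.nn.relu(tf.matmul(activation, back_w) + back_b)  # reconstruction of x
    bactivation_loss = tf.reduce_mean(tf.square(bactivation - x))     # squared-loss style
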
Example #3
    def __init__(self, input_layer, output_nodes,
                 session=None,
                 bias=None,
                 weights=None,
                 back_weights=None,
                 back_bias=None,
                 freeze=False,
                 non_liniarity=tf.nn.relu,
                 bactivation_loss_func=squared_loss,
                 weight_extender_func=noise_weight_extender,
                 unsupervised_cost=1.,
                 supervised_cost=1.,
                 noise_std=None,
                 name='BackWeightLayer'):
        super(BackWeightLayer, self).__init__(input_layer, output_nodes,
                                              session=session,
                                              bias=bias,
                                              weights=weights,
                                              back_bias=back_bias,
                                              bactivate=True,
                                              freeze=freeze,
                                              non_liniarity=non_liniarity,
                                              weight_extender_func=weight_extender_func,
                                              bactivation_loss_func=bactivation_loss_func,
                                              unsupervised_cost=unsupervised_cost,
                                              supervised_cost=supervised_cost,
                                              noise_std=noise_std,
                                              name=name)
        self._back_weights = self._create_variable("back_weights",
                                                   (BaseLayer.OUTPUT_BOUND_VALUE, BaseLayer.INPUT_BOUND_VALUE),
                                                   back_weights if back_weights is not None else xavier_init(
                                                       self._output_nodes,
                                                       self._input_nodes))
Example #4
    def __init__(self,
                 input_layer,
                 output_nodes,
                 hidden_recog_nodes_1,
                 hidden_recog_nodes_2,
                 hidden_generation_nodes_1,
                 hidden_generation_nodes_2,
                 session=None,
                 hidden_recog_weights_1=None,
                 hidden_recog_weights_2=None,
                 hidden_recog_bias_1=None,
                 hidden_recog_bias_2=None,
                 hidden_generation_weights_1=None,
                 hidden_generation_weights_2=None,
                 hidden_generation_bias_1=None,
                 hidden_generation_bias_2=None,
                 output_mean_weights=None,
                 output_mean_bias=None,
                 output_var_weights=None,
                 output_var_bias=None,
                 reconstruction_mean_weights=None,
                 reconstruction_mean_bias=None,
                 freeze=False,
                 non_liniarity=tf.nn.softplus,
                 weight_extender_func=noise_weight_extender,
                 unsupervised_cost=1.,
                 supervised_cost=1.,
                 name='VariationalAutoencoderLayer'):
        super(VariationalAutoencoderLayer,
              self).__init__(input_layer,
                             output_nodes,
                             session=session,
                             freeze=freeze,
                             weight_extender_func=weight_extender_func,
                             name=name)
        self._unsupervised_cost = unsupervised_cost
        self._non_linarity = non_liniarity
        self._hidden_recog_nodes_1 = hidden_recog_nodes_1
        self._hidden_recog_nodes_2 = hidden_recog_nodes_2
        self._hidden_generation_nodes_1 = hidden_generation_nodes_1
        self._hidden_generation_nodes_2 = hidden_generation_nodes_2

        self._hidden_recog_weights_1 = self._create_variable(
            "hidden_recog_weights_1",
            (BaseLayer.INPUT_BOUND_VALUE, self._hidden_recog_nodes_1),
            hidden_recog_weights_1 if hidden_recog_weights_1 is not None else
            xavier_init(self._input_nodes, self._hidden_recog_nodes_1))
        self._hidden_recog_bias_1 = self._create_variable(
            "hidden_recog_bias_1", (self._hidden_recog_nodes_1, ),
            hidden_recog_bias_1
            if hidden_recog_bias_1 is not None else tf.zeros(
                (self._hidden_recog_nodes_1, )))
        self._hidden_recog_weights_2 = self._create_variable(
            "hidden_recog_weights_2",
            (self._hidden_recog_nodes_1, self._hidden_recog_nodes_2),
            hidden_recog_weights_2
            if hidden_recog_weights_2 is not None else xavier_init(
                self._hidden_recog_nodes_1, self._hidden_recog_nodes_2))
        self._hidden_recog_bias_2 = self._create_variable(
            "hidden_recog_bias_2", (self._hidden_recog_nodes_2, ),
            hidden_recog_bias_2
            if hidden_recog_bias_2 is not None else tf.zeros(
                (self._hidden_recog_nodes_2, )))
        self._hidden_generation_weights_1 = self._create_variable(
            "hidden_generation_weights_1",
            (BaseLayer.OUTPUT_BOUND_VALUE, self._hidden_generation_nodes_1),
            hidden_generation_weights_1
            if hidden_generation_weights_1 is not None else xavier_init(
                self._output_nodes, self._hidden_generation_nodes_1))
        self._hidden_generation_bias_1 = self._create_variable(
            "hidden_generation_bias_1", (self._hidden_generation_nodes_1, ),
            hidden_generation_bias_1
            if hidden_generation_bias_1 is not None else tf.zeros(
                (self._hidden_generation_nodes_1, )))
        self._hidden_generation_weights_2 = self._create_variable(
            "hidden_generation_weights_2",
            (self._hidden_generation_nodes_1, self._hidden_generation_nodes_2),
            hidden_generation_weights_2 if hidden_generation_weights_2
            is not None else xavier_init(self._hidden_generation_nodes_1,
                                         self._hidden_generation_nodes_2))
        self._hidden_generation_bias_2 = self._create_variable(
            "hidden_generation_bias_2", (self._hidden_generation_nodes_2, ),
            hidden_generation_bias_2
            if hidden_generation_bias_2 is not None else tf.zeros(
                (self._hidden_generation_nodes_2, )))
        self._output_mean_weights = self._create_variable(
            "output_mean_weights",
            (self._hidden_recog_nodes_2, BaseLayer.OUTPUT_BOUND_VALUE),
            output_mean_weights if output_mean_weights is not None else
            xavier_init(self._hidden_recog_nodes_2, self._output_nodes))
        self._output_mean_bias = self._create_variable(
            "output_mean_bias", (BaseLayer.OUTPUT_BOUND_VALUE, ),
            output_mean_bias if output_mean_bias is not None else tf.zeros(
                (self._output_nodes, )))
        self._output_var_weights = self._create_variable(
            "output_var_weights",
            (self._hidden_recog_nodes_2, BaseLayer.OUTPUT_BOUND_VALUE),
            output_var_weights if output_var_weights is not None else
            xavier_init(self._hidden_recog_nodes_2, self._output_nodes))
        self._output_var_bias = self._create_variable(
            "output_var_bias", (BaseLayer.OUTPUT_BOUND_VALUE, ),
            output_var_bias if output_var_bias is not None else tf.zeros(
                (self._output_nodes, )))
        self._reconstruction_mean_weights = self._create_variable(
            "reconstruction_mean_weights",
            (self._hidden_generation_nodes_2, BaseLayer.INPUT_BOUND_VALUE),
            reconstruction_mean_weights
            if reconstruction_mean_weights is not None else xavier_init(
                self._hidden_generation_nodes_2, self._input_nodes))
        self._reconstruction_mean_bias = self._create_variable(
            "reconstruction_mean_bias", (BaseLayer.INPUT_BOUND_VALUE, ),
            reconstruction_mean_bias
            if reconstruction_mean_bias is not None else tf.zeros(
                (self._input_nodes, )))

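        # Recognition (encoder) pass: map the incoming activation to the mean
        # and log-variance of the latent Gaussian, for the train and predict
        # paths separately.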
        self._z_mean_train, self._z_log_sigma_sq_train = self.recognition(
            self.input_layer.activation_train)
        self._z_mean_predict, self._z_log_sigma_sq_predict = self.recognition(
            self.input_layer.activation_predict)

        eps = tf.random_normal(tf.shape(self._z_mean_train),
                               0,
                               1,
                               dtype=tf.float32)
        # z = mu + sigma*epsilon
        self._z_train = tf.add(
            self._z_mean_train,
            tf.mul(tf.sqrt(tf.exp(self._z_log_sigma_sq_train)), eps))

        self._x_reconstruction_train = self.generator(self._z_train)
        self._x_reconstruction_predict = self.generator(self._z_mean_predict)
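
The last few lines are the standard VAE reparameterisation trick: instead of sampling z directly, the code draws eps ~ N(0, 1) and forms z = mu + sigma * eps, which keeps the sample differentiable with respect to the recognition network's outputs (tf.mul is the pre-1.0 TensorFlow name for element-wise multiplication). The same computation in isolation, with an illustrative latent size:

    import tensorflow as tf

    z_mean = tf.placeholder(tf.float32, (None, 20))          # mu from the encoder
    z_log_sigma_sq = tf.placeholder(tf.float32, (None, 20))  # log(sigma^2) from the encoder

    eps = tf.random_normal(tf.shape(z_mean), 0, 1, dtype=tf.float32)
    z = z_mean + tf.sqrt(tf.exp(z_log_sigma_sq)) * eps       # z = mu + sigma * epsilon
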
Example #5
    def __init__(self, input_layer, output_nodes,
                 session=None,
                 bias=None,
                 weights=None,
                 back_weights=None,
                 back_bias=None,
                 freeze=False,
                 non_liniarity=tf.nn.relu,
                 weight_extender_func=noise_weight_extender,
                 bactivation_loss_func=squared_loss,
                 unsupervised_cost=1.,
                 supervised_cost=1.,
                 noise_std=None,
                 name='BackWeightLayer'):
        super(BackWeightCandidateLayer, self).__init__(input_layer, output_nodes,
                                                       session=session,
                                                       bias=bias,
                                                       weights=weights,
                                                       back_weights=back_weights,
                                                       back_bias=back_bias,
                                                       freeze=freeze,
                                                       bactivation_loss_func=bactivation_loss_func,
                                                       non_liniarity=non_liniarity,
                                                       weight_extender_func=weight_extender_func,
                                                       unsupervised_cost=unsupervised_cost,
                                                       supervised_cost=supervised_cost,
                                                       noise_std=noise_std,
                                                       name=name)

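        # Candidate units: extra weight and bias variables for self.CANDIDATES
        # candidate nodes, created outside the usual kwarg-backed variables
        # (is_kwarg=False).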
        self._candidate_bias = self._create_variable("candidate_bias",
                                                     (self.CANDIDATES,),
                                                     np.zeros(self.CANDIDATES, dtype=np.float32),
                                                     is_kwarg=False)
        self._candidate_weights = self._create_variable("candidate_weights",
                                                        (self.INPUT_BOUND_VALUE, self.CANDIDATES),
                                                        xavier_init(
                                                            self.input_nodes,
                                                            1),
                                                        is_kwarg=False)
        self._candidate_back_bias = self._create_variable("candidate_back_bias",
                                                          (self.CANDIDATES,),
                                                          np.zeros(self.CANDIDATES, dtype=np.float32),
                                                          is_kwarg=False)
        self._candidate_back_weights = self._create_variable("candidate_back_weights",
                                                             (self.CANDIDATES, self.INPUT_BOUND_VALUE),
                                                             xavier_init(
                                                                 1,
                                                                 self.input_nodes),
                                                             is_kwarg=False)

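        # Candidate bactivation: push the input through the candidate weights
        # and back again, for both the predict and train activations.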
        self._candidate_bactivation_predict = self._non_liniarity(
            tf.matmul(
                self._non_liniarity(
                    tf.matmul(self.input_layer.activation_predict, self._candidate_weights) + self._candidate_bias),
                self._candidate_back_weights) + self._candidate_back_bias)

        self._candidate_bactivation_train = self._non_liniarity(
            tf.matmul(
                self._non_liniarity(
                    tf.matmul(self.input_layer.activation_train, self._candidate_weights) + self._candidate_bias),
                self._candidate_back_weights) + self._candidate_back_bias)

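        # Initialise only the freshly created candidate variables in the
        # existing session.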
        self.session.run(tf.initialize_variables([self._candidate_weights, self._candidate_bias,
                                                  self._candidate_back_bias, self._candidate_back_weights]))
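
The Layer-based examples above all default bactivation_loss_func to squared_loss, which is likewise not shown in the excerpts. A plausible definition consistent with how it is used here (an assumption, not the project's code):

    import tensorflow as tf

    def squared_loss(target, prediction):
        # Mean squared difference between an input and its reconstruction.
        return tf.reduce_mean(tf.square(target - prediction))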