def test_original_biased_nonlin_semi_nmf(self):
     """TF biased nonlinear semi-NMF where the target is built as relu(u @ v).

     Unlike the sibling tests, `a` is synthesized from the loaded factors
     instead of being read from the .mat file, so the solver starts from a
     target that is exactly representable as relu(u @ v).
     """
     auv = sio.loadmat(mat_file)
     u, v = auv['u'], auv['v']
     # Target is the nonlinear reconstruction of the loaded factors.
     a = relu(u @ v)
     # Ones row appended to v so the bias term rides inside the factor matrix.
     bias_v = np.vstack((v, np.ones((1, v.shape[1]))))
     old_loss = np_frobenius_norm(a, u @ v)

     a_ph = tf.placeholder(tf.float64, shape=a.shape)
     u_ph = tf.placeholder(tf.float64, shape=u.shape)
     bias_v_ph = tf.placeholder(tf.float64, shape=bias_v.shape)

     tf_bias_u, tf_v = nonlin_semi_nmf(a_ph, u_ph, bias_v_ph, use_bias=True, use_tf=True, num_calc_v=0)

     init = tf.global_variables_initializer()
     with tf.Session() as sess:
         init.run()

         start_time = time.time()
         # NOTE(review): `tf_bias_u` lands in `_u` and `tf_v` in `_bias_v`;
         # the names look swapped relative to the graph outputs above —
         # confirm against nonlin_semi_nmf's return order.
         _u, _bias_v = sess.run([tf_bias_u, tf_v], feed_dict={a_ph: a, u_ph: u, bias_v_ph: bias_v})
         end_time = time.time()

     duration = end_time - start_time
     # Ones column on u pairs with the ones row stacked onto v earlier.
     _bias_u = np.hstack((_u, np.ones((_u.shape[0], 1))))
     new_loss = np_frobenius_norm(a, relu(_bias_u @ _bias_v))
     assert a.shape == (_bias_u @ _bias_v).shape
     assert new_loss < old_loss, "new loss should be less than old loss."
     print_format('TensorFlow', 'biased Nonlinear semi-NMF(NOT CALC v)', a, _bias_u, _bias_v, old_loss, new_loss,
                  duration)
# Example #2
    def test_np_not_calc_v_biased_nonlin_semi_nmf(self):
        """Fixture-data check: biased NumPy nonlinear semi-NMF with num_calc_v=0 lowers the loss."""
        matrices = sio.loadmat(mat_file)
        a, u, v = matrices['a'], matrices['u'], matrices['v']
        loss_before = np_frobenius_norm(a, u @ v)

        # Ones column appended to u carries the bias term inside the factor.
        u_with_bias = np.hstack((u, np.ones((u.shape[0], 1))))
        begin = time.time()

        u_with_bias, v = nonlin_semi_nmf(a,
                                         u_with_bias,
                                         v,
                                         use_bias=True,
                                         num_calc_v=0)

        elapsed = time.time() - begin

        # Matching ones row on v so the biased product shapes line up.
        v_with_bias = np.vstack((v, np.ones((1, v.shape[1]))))

        loss_after = np_frobenius_norm(a, relu(u_with_bias @ v_with_bias))
        assert a.shape == (u_with_bias @ v_with_bias).shape
        assert loss_after < loss_before, "new loss should be less than old loss."
        print('\n[Numpy]Solve biased Nonlinear semi-NMF(NOT CALCULATE v)\n\t'
              'old loss {0}\n\t'
              'new loss {1}\n\t'
              'process duration {2}'.format(loss_before, loss_after, elapsed))
 def test_tf_not_calc_v_nonlin_semi_nmf(self):
     """Unbiased TF nonlinear semi-NMF with num_calc_v=0 should lower the loss.

     Loads (a, u, v) from the fixture .mat file, runs one TF pass that
     updates u only (num_calc_v=0, num_calc_u=1), and checks that the
     relu reconstruction loss decreased.
     """
     auv = sio.loadmat(mat_file)
     a, u, v = auv['a'], auv['u'], auv['v']
     old_loss = np_frobenius_norm(a, u @ v)

     # [1000, 500]
     a_ph = tf.placeholder(tf.float64, shape=a.shape)
     # [1000, 201]
     u_ph = tf.placeholder(tf.float64, shape=u.shape)
     # [200, 500]
     v_ph = tf.placeholder(tf.float64, shape=v.shape)
     tf_u, tf_v = nonlin_semi_nmf(a_ph, u_ph, v_ph, use_tf=True, use_bias=False, num_calc_v=0, num_calc_u=1)
     tf_loss = frobenius_norm(a_ph, tf.nn.relu(tf.matmul(tf_u, tf_v)))

     init = tf.global_variables_initializer()
     with tf.Session() as sess:
         init.run()

         start_time = time.time()
         _u, _v, new_loss = sess.run([tf_u, tf_v, tf_loss], feed_dict={a_ph: a, u_ph: u, v_ph: v})
         end_time = time.time()

     duration = end_time - start_time
     assert a.shape == (_u @ _v).shape
     assert new_loss < old_loss, "new loss should be less than old loss."
     # Fixed: report the *updated* factors (was the original u, v), and fix the
     # 'CALCLATE' typo in the printed label, matching the sibling tests.
     print_format('TensorFlow', 'Nonlinear semi-NMF(NOT CALCULATE v)', a, _u, _v, old_loss, new_loss, duration)
# Example #4
    def _autoencoder(self):
        """Update each layer's kernel (and bias) with one semi-NMF pass.

        For every layer except the last, factorizes the layer's output
        against the next layer's output, solves for a new kernel, and
        collects `assign` ops for the kernel (and bias, if present).

        Returns:
            tf.Operation: a `tf.group` of all collected assign ops.
        """

        updates = []
        # Last layer is excluded; the loop body reads self._layers[i + 1].
        layers = self._layers[:-1]
        for i, layer in enumerate(layers):
            a = layer.output  # [3000, 784]
            u = self._layers[i + 1].output
            kernel = layer.kernel
            temporary_shape = utility.transpose_shape(kernel)  # [1000, 784]
            if layer.use_bias:
                # Extra row carries the bias inside the factor matrix.
                temporary_shape[0] += 1
                kernel = tf.concat((kernel, layer.bias[None, ...]), axis=0)
            # Scratch, non-trainable variable used as the initial v factor.
            temporary_kernel = tf.get_variable(
                'temporal_{}'.format(i),
                temporary_shape,
                dtype=tf.float64,
                initializer=tf.contrib.layers.xavier_initializer(),
                trainable=False)
            # First pass refines u (the next layer's output) against this
            # layer's output; the solved v (scratch kernel) is discarded.
            u, _ = mf.semi_nmf(
                a=a,
                u=u,
                v=temporary_kernel,
                use_tf=True,
                use_bias=layer.use_bias,
                num_iters=1,
                first_nneg=True,
            )

            # NOTE(review): a and u swap roles below (a=u, u=a) — presumably
            # the decoder direction of the autoencoder; confirm against
            # mf.semi_nmf's argument contract.
            # Not use activation (ReLU)
            if not layer.activation:
                _, v = mf.semi_nmf(
                    a=u,
                    u=a,
                    v=kernel,
                    use_tf=True,
                    use_bias=layer.use_bias,
                    num_iters=1,
                    first_nneg=True,
                )
            # Use activation (ReLU)
            # else utility.get_op_name(layer.activation) == 'Relu':
            else:
                _, v = mf.nonlin_semi_nmf(
                    a=u,
                    u=a,
                    v=kernel,
                    use_tf=True,
                    use_bias=layer.use_bias,
                    num_calc_v=0,
                    num_calc_u=1,
                    first_nneg=True,
                )
            if layer.use_bias:
                # Split the stacked bias row back out of the solved factor.
                v, bias = utility.split_v_bias(v)
                updates.append(layer.bias.assign(bias))
            updates.append(layer.kernel.assign(v))
        return tf.group(*updates)
# Example #5
 def minimize(self, loss=None):
     """Construct the control dependencies for calculating neural net optimized.

     Walks the layers in reverse order, solving one (nonlinear) semi-NMF
     step per layer and collecting kernel/bias `assign` ops. The target
     `a` starts at the labels and is carried forward as each layer's full
     (unsliced) output.

     Returns:
         tf.Operation: a `tf.group` of the collected assign ops.
     """
     self._init(loss)

     # Optionally run the autoencoder pre-training pass first.
     if self._use_autoencoder:
         self._autoencoder()

     a = self.labels
     updates = []
     # Reverse
     layers = self._layers[::-1]
     for i, layer in enumerate(layers):
         _u = layer.output
         v = layer.kernel

         # Check whether u is a tensor or not.
         #  that is Recurrent output if it have dim more than 3.
         if _u.shape.ndims >= 3 and not isinstance(layer.recurrent, tf.Variable):
             # Sequence output: keep only the last time step.
             u = _u[:, -1, :]
         else:
             u = _u

         if isinstance(layer.recurrent, tf.Variable):
             # Stack recurrent weights under the kernel so both are solved together.
             v = tf.concat((layer.kernel, layer.recurrent), axis=0)

         if layer.use_bias:
             # Bias rides along as an extra row of v.
             v = tf.concat((v, layer.bias[None, ...]), axis=0)

         # Not use activation (ReLU)
         if not layer.activation:
             u, v = mf.semi_nmf(a=a, u=u, v=v,
                                use_tf=True,
                                use_bias=layer.use_bias,
                                num_iters=1,
                                first_nneg=True,
                                )
         # Use activation (ReLU)
         else:
             u, v = mf.nonlin_semi_nmf(a=a, u=u, v=v,
                                       use_tf=True,
                                       use_bias=layer.use_bias,
                                       num_calc_v=1,
                                       num_calc_u=1,
                                       first_nneg=True,
                                       )
         # NOTE(review): when layer.recurrent was concatenated into v above,
         # it is never split back out before kernel.assign(v) — verify that
         # the shapes still match and the recurrent weights are not dropped.
         if layer.use_bias:
             v, bias = utility.split_v_bias(v)
             updates.append(layer.bias.assign(bias))
         updates.append(layer.kernel.assign(v))
         # Next layer's target is this layer's full (unsliced) output.
         a = tf.identity(_u)

     return tf.group(*updates)
# Example #6
    def minimize(self, loss=None):
        """Construct the control dependencies for calculating neural net optimized.

        Walks the layers in reverse order, solving one (nonlinear) semi-NMF
        step per layer and collecting kernel/bias `assign` ops. The target
        `a` starts at the labels and is carried forward as each layer's
        output.

        Returns:
            tf.Operation: a `tf.group` of the collected assign ops.
        """
        self._init(loss)

        # Optionally run the autoencoder pre-training pass first.
        if self._use_autoencoder:
            self._autoencoder()

        a = self.labels
        updates = []
        # Reverse
        layers = self._layers[::-1]
        for i, layer in enumerate(layers):
            u = layer.output
            v = layer.kernel
            if layer.use_bias:
                # Bias rides along as an extra row of v.
                v = tf.concat((v, layer.bias[None, ...]), axis=0)

            # Not use activation (ReLU)
            if not layer.activation:
                u, v = mf.semi_nmf(
                    a=a,
                    u=u,
                    v=v,
                    use_tf=True,
                    use_bias=layer.use_bias,
                    num_iters=1,
                    first_nneg=True,
                )
            # Use activation (ReLU)
            else:
                u, v = mf.nonlin_semi_nmf(
                    a=a,
                    u=u,
                    v=v,
                    use_tf=True,
                    use_bias=layer.use_bias,
                    num_calc_v=1,
                    num_calc_u=1,
                    first_nneg=True,
                )
            if layer.use_bias:
                # Split the stacked bias row back out of the solved factor.
                v, bias = utility.split_v_bias(v)
                updates.append(layer.bias.assign(bias))
            updates.append(layer.kernel.assign(v))
            # Next layer's target is this layer's (updated) output factor.
            a = tf.identity(u)

        return tf.group(*updates)
 def test_np_not_calc_v_vanilla_nonlin_semi_nmf(self):
     """Random-data check: unbiased nonlinear semi-NMF (num_calc_v=0) lowers the loss."""
     target = np.random.uniform(0., 1., size=(100, 100))
     left = np.random.uniform(0., 1., size=(100, 300))
     right = np.random.uniform(-1., 1., size=(300, 100))
     loss_before = np_frobenius_norm(target, left @ right)

     begin = time.time()

     left, right = nonlin_semi_nmf(target, left, right, use_bias=False, num_calc_v=0)
     # The left factor must stay strictly positive after the update.
     assert np.min(left) > 0, np.min(left)

     elapsed = time.time() - begin

     loss_after = np_frobenius_norm(target, relu(left @ right))
     assert target.shape == (left @ right).shape
     assert loss_after < loss_before, "new loss should be less than old loss."
     print_format('Numpy', 'Nonlinear semi-NMF(NOT CALCULATE v)', target, left, right, loss_before, loss_after, elapsed)
# Example #8
    def test_np_not_calc_v_vanilla_nonlin_semi_nmf(self):
        """Fixture-data check: unbiased nonlinear semi-NMF with num_calc_v=0 lowers the loss."""
        matrices = sio.loadmat(mat_file)
        a, u, v = matrices['a'], matrices['u'], matrices['v']
        loss_before = np_frobenius_norm(a, u @ v)

        begin = time.time()

        u, v = nonlin_semi_nmf(a, u, v, use_bias=False, num_calc_v=0)

        elapsed = time.time() - begin

        loss_after = np_frobenius_norm(a, relu(u @ v))
        assert a.shape == (u @ v).shape
        assert loss_after < loss_before, "new loss should be less than old loss."
        print('\n[Numpy]Solve Nonlinear semi-NMF(NOT CALCULATE v)\n\t'
              'old loss {0}\n\t'
              'new loss {1}\n\t'
              'process duration {2}'.format(loss_before, loss_after, elapsed))
 def test_np_not_calc_v_biased_nonlin_semi_nmf(self):
     """Random-data check: biased nonlinear semi-NMF (num_calc_v=0) lowers the loss."""
     target = np.random.uniform(0., 1., size=(100, 100))
     left = np.random.uniform(0., 1., size=(100, 300))
     right = np.random.uniform(-1., 1., size=(300, 100))
     loss_before = np_frobenius_norm(target, left @ right)
     # Ones row appended to the right factor carries the bias term.
     right_biased = np.vstack((right, np.ones((1, right.shape[1]))))

     begin = time.time()

     left, right_biased = nonlin_semi_nmf(target, left, right_biased, use_bias=True, num_calc_v=0)
     # The left factor must stay strictly positive after the update.
     assert np.min(left) > 0, np.min(left)

     elapsed = time.time() - begin

     # Matching ones column on the left factor for the biased product.
     left_biased = np.hstack((left, np.ones((left.shape[0], 1))))

     loss_after = np_frobenius_norm(target, relu(left_biased @ right_biased))
     assert target.shape == (left_biased @ right_biased).shape
     assert loss_after < loss_before, "new loss should be less than old loss."
     print_format('Numpy', 'biased Nonlinear semi-NMF(NOT CALC v)', target, left_biased, right_biased, loss_before, loss_after, elapsed)
# Example #10
    def test_tf_not_calc_v_biased_nonlin_semi_nmf(self):
        """Fixture-data check: biased TF nonlinear semi-NMF (num_calc_v=0) lowers the loss."""
        data = sio.loadmat(mat_file)
        a, u, v = data['a'], data['u'], data['v']
        # Ones column appended to u carries the bias term inside the factor.
        u_biased = np.hstack((u, np.ones((u.shape[0], 1))))
        loss_before = np_frobenius_norm(a, u @ v)

        # Graph inputs mirror the numpy array shapes exactly.
        a_in = tf.placeholder(tf.float64, shape=a.shape)
        u_in = tf.placeholder(tf.float64, shape=u_biased.shape)
        v_in = tf.placeholder(tf.float64, shape=v.shape)

        out_u, out_v = nonlin_semi_nmf(a_in,
                                       u_in,
                                       v_in,
                                       num_calc_v=0,
                                       use_bias=True,
                                       use_tf=True)

        init = tf.global_variables_initializer()
        with tf.Session() as sess:
            init.run()

            begin = time.time()
            new_u, new_v = sess.run([out_u, out_v],
                                    feed_dict={
                                        a_in: a,
                                        u_in: u_biased,
                                        v_in: v
                                    })
            end = time.time()

        elapsed = end - begin
        # Matching ones row on v so the biased product shapes line up.
        v_biased = np.vstack((new_v, np.ones((1, v.shape[1]))))
        loss_after = np_frobenius_norm(a, relu(new_u @ v_biased))
        assert a.shape == (new_u @ v_biased).shape
        assert loss_after < loss_before, "new loss should be less than old loss."
        print(
            '\n[TensorFlow]Solve biased Nonlinear semi-NMF(NOT CALCULATE v)\n\t'
            'old loss {0}\n\t'
            'new loss {1}\n\t'
            'process duration {2}'.format(loss_before, loss_after, elapsed))
# Example #11
    def minimize(self, loss=None, pretrain=False):
        """Construct the control dependencies for calculating neural net optimized.

        Walks the layers in reverse order, solving one semi-NMF step per
        layer (plain, ReLU, or softmax variant depending on the layer's
        activation) and collecting kernel/bias `assign` ops. When
        `pretrain` is True, an autoencoder pre-training op is built as well.

        Returns:
            AttrDict: `ae` is the pre-training op (or tf.no_op), `nmf` is
            a `tf.group` of the collected assign ops.
        """
        self._init(loss)
        # pre-train with auto encoder.
        pretrain_op = self._autoencoder() if pretrain else tf.no_op()

        a = self.labels
        updates = []
        # Reverse
        layers = self._layers[::-1]
        for i, layer in enumerate(layers):
            u = layer.output
            v = layer.kernel
            if layer.use_bias:
                # Bias rides along as an extra row of v.
                v = tf.concat((v, layer.bias[None, ...]), axis=0)

            # Not use activation (ReLU)
            if not layer.activation:
                u, v = mf.semi_nmf(
                    a=a,
                    u=u,
                    v=v,
                    use_tf=True,
                    use_bias=layer.use_bias,
                    num_iters=1,
                    first_nneg=True,
                )
            # Use activation (ReLU)
            elif utility.get_op_name(layer.activation) == 'Relu':
                u, v = mf.nonlin_semi_nmf(
                    a=a,
                    u=u,
                    v=v,
                    use_tf=True,
                    use_bias=layer.use_bias,
                    num_calc_v=1,
                    num_calc_u=1,
                    first_nneg=True,
                )
            # Use Softmax
            elif utility.get_op_name(layer.activation) == 'Softmax':
                print('used softmax!!')
                u, v = mf.softmax_nmf(
                    a=a,
                    u=u,
                    v=v,
                    use_tf=True,
                    use_bias=layer.use_bias,
                )
            # No trailing else: for any other activation, u and v pass
            # through unchanged, so the assigns below rewrite the original
            # kernel/bias values.
            if layer.use_bias:
                # Split the stacked bias row back out of the solved factor.
                v, bias = utility.split_v_bias(v)
                updates.append(layer.bias.assign(bias))
            updates.append(layer.kernel.assign(v))
            # Next layer's target is this layer's (updated) output factor.
            a = tf.identity(u)

        return AttrDict(ae=pretrain_op, nmf=tf.group(*updates))