def mult_log_reg(tensor_in, numclasses=None, data=None, dtype=tf.float32,
                 initrange=1e-10, seed=None, l2=0.0, name='log_reg'):
    """
    Performs a multinomial logistic regression forward pass. Weights and bias
    are initialized to (near) zero via a uniform init with a very small *initrange*.

    :param tensor_in: A tensor_ or placeholder_
    :param numclasses: Number of classes for classification.
    :param data: For shape inference (a :any:`HotIndex` or a one-hot matrix).
    :param dtype: For :any:`weights` initialization.
    :param initrange: For :any:`weights` initialization.
    :param seed: For :any:`weights` initialization.
    :param l2: For :any:`weights` initialization.
    :param name: For `variable_scope`_
    :return: A tensor of shape (tensor_in.shape[0], numclasses).
    """
    if data is not None:
        if type(data) is loader.HotIndex:
            numclasses = data.dim
        elif loader.is_one_hot(data):
            numclasses = data.shape[1]
        else:
            raise MissingShapeError('Can not infer shape from data: %s' % data)
    elif numclasses is None:
        raise MissingShapeError(
            'Can not infer shape. Need numclasses or data argument.')
    inshape = tensor_in.get_shape().as_list()
    # Softmax layer parameters: W maps input features to class logits, b is the per-class bias.
    W = weights('uniform', [inshape[1], numclasses], dtype=dtype,
                initrange=initrange, seed=seed, l2=l2, name=name + '_weights')
    b = weights('uniform', [numclasses], dtype=dtype,
                initrange=initrange, seed=seed, l2=l2, name=name + '_bias')
    tensor_out = tf.nn.softmax(tf.matmul(tensor_in, W) + b)
    return tensor_out
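
# Minimal usage sketch (an assumption, not part of the source module): builds the
# softmax layer with TensorFlow 1.x-style graph construction and lets mult_log_reg
# infer `numclasses` from a hypothetical one-hot label matrix `y_train`.
import numpy as np
import tensorflow as tf

x = tf.placeholder(tf.float32, shape=[None, 50], name='features')
y_train = np.eye(3)[np.random.randint(0, 3, size=100)]  # hypothetical one-hot labels, 3 classes

# numclasses is inferred from the one-hot `data` argument (3 columns here).
predictions = mult_log_reg(x, data=y_train, name='softmax_out')
# `predictions` has shape (batch_size, 3): a probability distribution per row.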
def test_is_one_hot_true_sparse():
    w = sps.csr_matrix(np.array([[1, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1]]))
    assert loader.is_one_hot(w)


def test_is_one_hot_false3_dense():
    w = np.array([0, 0, 1])
    assert not loader.is_one_hot(w)


def test_is_one_hot_false2_dense():
    w = np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1]])
    assert not loader.is_one_hot(w)


def test_is_one_hot_false1_sparse():
    w = sps.csr_matrix(np.array([[5, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1]]))
    assert not loader.is_one_hot(w)


def test_is_one_hot_true_dense():
    w = np.array([[1, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1]])
    assert loader.is_one_hot(w)
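
# A minimal sketch (not the library's implementation) of the predicate these tests
# exercise: a matrix counts as one-hot when every row has exactly one nonzero entry
# and that entry is 1. Dense arrays and CSR matrices are both accepted; 1-D vectors
# are rejected.
import numpy as np
import scipy.sparse as sps

def is_one_hot_sketch(mat):
    if sps.issparse(mat):
        mat = mat.toarray()
    mat = np.asarray(mat)
    if mat.ndim != 2:
        return False  # 1-D input is rejected (test_is_one_hot_false3_dense)
    binary = np.all((mat == 0) | (mat == 1))    # entries restricted to {0, 1}
    one_per_row = np.all(mat.sum(axis=1) == 1)  # exactly one active entry per row
    return bool(binary and one_per_row)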