from tensorflow.python.framework import ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import logging_ops
from tensorflow.python.ops import variable_scope as vs

# NOTE: softmax_classifier is assumed to be defined in, or imported into,
# this module.


def logistic_regression(X, y, class_weight=None, init_mean=None,
                        init_stddev=1.0):
  """Creates logistic regression TensorFlow subgraph.

  Args:
    X: tensor or placeholder for input features,
       shape should be [batch_size, n_features].
    y: tensor or placeholder for target,
       shape should be [batch_size, n_classes].
    class_weight: tensor, [n_classes], where for each class
                  it has weight of the class. If not provided, will check if
                  the graph contains tensor `class_weight:0`. If that is not
                  provided either, all ones are used.
    init_mean: the mean value to use for initialization.
    init_stddev: the standard deviation to use for initialization.

  Returns:
    Predictions and loss tensors.

  Side effects:
    The variables logistic_regression.weights and logistic_regression.bias
    are initialized as follows. If init_mean is not None, then initialization
    will be done using a random normal initializer with the given init_mean
    and init_stddev. (These may be set to 0.0 each if a zero initialization
    is desirable for convex use cases.) If init_mean is None, then the
    uniform_unit_scaling_initializer will be used.
  """
  with vs.variable_scope('logistic_regression'):
    logging_ops.histogram_summary('logistic_regression.X', X)
    logging_ops.histogram_summary('logistic_regression.y', y)
    # Set up the requested initialization.
    if init_mean is None:
      weights = vs.get_variable('weights',
                                [X.get_shape()[1], y.get_shape()[-1]])
      bias = vs.get_variable('bias', [y.get_shape()[-1]])
    else:
      weights = vs.get_variable(
          'weights', [X.get_shape()[1], y.get_shape()[-1]],
          initializer=init_ops.random_normal_initializer(
              init_mean, init_stddev))
      bias = vs.get_variable(
          'bias', [y.get_shape()[-1]],
          initializer=init_ops.random_normal_initializer(
              init_mean, init_stddev))
    logging_ops.histogram_summary('logistic_regression.weights', weights)
    logging_ops.histogram_summary('logistic_regression.bias', bias)
    # If no class weight provided, try to retrieve one from a pre-defined
    # tensor name in the graph. Use an explicit `is None` check: evaluating
    # a Tensor in a boolean context is an error.
    if class_weight is None:
      try:
        class_weight = ops.get_default_graph().get_tensor_by_name(
            'class_weight:0')
      except KeyError:
        pass
    return softmax_classifier(X, y, weights, bias, class_weight=class_weight)
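# Example usage (a minimal sketch, not from the original source): wiring the
# subgraph above into a trainable graph with the TF 0.x-era API this module
# targets. The helper name, feature/label shapes, and learning rate are
# illustrative assumptions only.
import tensorflow as tf


def _example_logistic_regression_usage():  # hypothetical helper
  X = tf.placeholder(tf.float32, [None, 3])
  y = tf.placeholder(tf.float32, [None, 2])
  # Zero initialization (init_mean=0.0, init_stddev=0.0) is a sensible
  # choice for this convex objective, per the docstring above.
  predictions, loss = logistic_regression(X, y, init_mean=0.0,
                                          init_stddev=0.0)
  train_op = tf.train.GradientDescentOptimizer(0.1).minimize(loss)
  return predictions, loss, train_op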
def test_softmax_classifier(self):
  with self.test_session() as session:
    features = tf.placeholder(tf.float32, [None, 3])
    labels = tf.placeholder(tf.float32, [None, 2])
    weights = tf.constant([[0.1, 0.1], [0.1, 0.1], [0.1, 0.1]])
    biases = tf.constant([0.2, 0.3])
    class_weight = tf.constant([0.1, 0.9])
    prediction, loss = ops.softmax_classifier(features, labels, weights,
                                              biases, class_weight)
    self.assertEqual(prediction.get_shape()[1], 2)
    self.assertEqual(loss.get_shape(), [])
    value = session.run(loss, {features: [[0.2, 0.3, 0.2]],
                               labels: [[0, 1]]})
    self.assertAllClose(value, 0.55180627)
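# A quick NumPy re-derivation of the expected loss in the test above. This is
# a sketch under an assumption: that softmax_classifier computes
# logits = x @ W + b, scales the logits elementwise by class_weight, and then
# applies softmax cross-entropy against the labels. The arithmetic below
# reproduces the asserted 0.55180627 under that assumption.
import numpy as np

x = np.array([[0.2, 0.3, 0.2]])
W = np.full((3, 2), 0.1)
b = np.array([0.2, 0.3])
class_weight = np.array([0.1, 0.9])
labels = np.array([[0.0, 1.0]])

logits = (x.dot(W) + b) * class_weight  # [[0.027, 0.333]]
# Plain softmax cross-entropy: -sum(labels * log_softmax(logits)).
log_probs = logits - np.log(np.sum(np.exp(logits), axis=1, keepdims=True))
loss = float(-np.sum(labels * log_probs, axis=1).mean())
print(loss)  # ~0.5518063, matching the assertAllClose value above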