def logistic_regression(X, y, class_weight=None):
  """Creates logistic regression TensorFlow subgraph.

  Args:
    X: tensor or placeholder for input features,
       shape should be [batch_size, n_features].
    y: tensor or placeholder for target,
       shape should be [batch_size, n_classes].
    class_weight: tensor, [n_classes], where for each class
                  it has weight of the class. If not provided
                  all ones are used.

  Returns:
    Predictions and loss tensors.
  """
  with tf.variable_scope('logistic_regression'):
    # Log input/target distributions for TensorBoard inspection.
    tf.histogram_summary('logistic_regression.X', X)
    tf.histogram_summary('logistic_regression.y', y)
    # Infer layer dimensions from the static shapes of the inputs.
    n_features = X.get_shape()[1]
    n_classes = y.get_shape()[-1]
    weights = tf.get_variable('weights', [n_features, n_classes])
    bias = tf.get_variable('bias', [n_classes])
    # Log parameter distributions as well.
    tf.histogram_summary('logistic_regression.weights', weights)
    tf.histogram_summary('logistic_regression.bias', bias)
    return softmax_classifier(X, y, weights, bias, class_weight=class_weight)
def logistic_regression(X, y, class_weight=None):
  """Creates logistic regression TensorFlow subgraph.

  Args:
    X: tensor or placeholder for input features,
       shape should be [batch_size, n_features].
    y: tensor or placeholder for target,
       shape should be [batch_size, n_classes].
    class_weight: tensor, [n_classes], where for each class
                  it has weight of the class. If not provided
                  all ones are used.

  Returns:
    Predictions and loss tensors.
  """
  with tf.variable_scope('logistic_regression'):
    # Summaries of inputs/targets help debug training in TensorBoard.
    tf.histogram_summary('logistic_regression.X', X)
    tf.histogram_summary('logistic_regression.y', y)
    input_dim, output_dim = X.get_shape()[1], y.get_shape()[-1]
    weights = tf.get_variable('weights', [input_dim, output_dim])
    bias = tf.get_variable('bias', [output_dim])
    tf.histogram_summary('logistic_regression.weights', weights)
    tf.histogram_summary('logistic_regression.bias', bias)
    return softmax_classifier(X, y, weights, bias, class_weight=class_weight)
def logistic_regression(X, y, class_weight=None):
  """Creates logistic regression TensorFlow subgraph.

  Args:
    X: tensor or placeholder for input features,
       shape should be [batch_size, n_features].
    y: tensor or placeholder for target,
       shape should be [batch_size, n_classes].
    class_weight: tensor, [n_classes], where for each class
                  it has weight of the class. If not provided
                  will check if graph contains tensor `class_weight:0`.
                  If that is not provided either all ones are used.

  Returns:
    Predictions and loss tensors.
  """
  with tf.variable_scope('logistic_regression'):
    tf.histogram_summary('logistic_regression.X', X)
    tf.histogram_summary('logistic_regression.y', y)
    weights = tf.get_variable('weights',
                              [X.get_shape()[1], y.get_shape()[-1]])
    bias = tf.get_variable('bias', [y.get_shape()[-1]])
    tf.histogram_summary('logistic_regression.weights', weights)
    tf.histogram_summary('logistic_regression.bias', bias)
    # If no class weight provided, try to retrieve one from a pre-defined
    # tensor name in the graph.
    # FIX: use `is None` rather than truthiness — `not class_weight` calls
    # bool() on the argument, which raises TypeError when a tf.Tensor is
    # actually passed in (tensor truth value is undefined).
    if class_weight is None:
      try:
        class_weight = tf.get_default_graph().get_tensor_by_name(
            'class_weight:0')
      except KeyError:
        # No such tensor in the graph: fall back to unweighted classes.
        pass
    return softmax_classifier(X, y, weights, bias, class_weight=class_weight)
def __init__(self, n_classes, input_shape, graph):
    """Builds a softmax-classifier subgraph inside `graph`.

    Args:
        n_classes: int, number of output classes.
        input_shape: int, number of input features per example.
        graph: tf.Graph into which all ops and variables are added.
    """
    with graph.as_default():
        # Step counter for the optimizer; not updated by gradient descent.
        self.global_step = tf.Variable(0, name="global_step", trainable=False)
        # Feed targets for features and labels; leading dim is batch size.
        self.inp = tf.placeholder(tf.float32, [None, input_shape], name="input")
        self.out = tf.placeholder(tf.float32, [None, n_classes], name="output")
        # Model parameters: one weight column and one bias per class.
        self.weights = tf.get_variable("weights", [input_shape, n_classes])
        self.bias = tf.get_variable("bias", [n_classes])
        # softmax_classifier returns (predictions, loss); no class weights
        # are supplied here, so classes are weighted equally.
        self.predictions, self.loss = softmax_classifier(
            self.inp, self.out, self.weights, self.bias)
def test_softmax_classifier(self):
    """Checks softmax_classifier output shapes and loss value."""
    with self.test_session() as session:
        inputs = tf.placeholder(tf.float32, [None, 3])
        targets = tf.placeholder(tf.float32, [None, 2])
        w = tf.constant([[0.1, 0.1], [0.1, 0.1], [0.1, 0.1]])
        b = tf.constant([0.2, 0.3])
        cw = tf.constant([0.1, 0.9])
        pred, loss = ops.softmax_classifier(inputs, targets, w, b, cw)
        # Predictions have one column per class; loss is a scalar.
        self.assertEqual(pred.get_shape()[1], 2)
        self.assertEqual(loss.get_shape(), [])
        feed = {inputs: [[0.2, 0.3, 0.2]], targets: [[0, 1]]}
        self.assertAllClose(session.run(loss, feed), 0.55180627)
def logistic_regression(X, y):
    """Creates logistic regression TensorFlow subgraph.

    Args:
        X: tensor or placeholder for input features.
        y: tensor or placeholder for target.

    Returns:
        Predictions and loss tensors.
    """
    with tf.variable_scope('logistic_regression'):
        # Parameter shapes follow directly from the static input shapes.
        n_in = X.get_shape()[1]
        n_out = y.get_shape()[1]
        weights = tf.get_variable('weights', [n_in, n_out])
        bias = tf.get_variable('bias', [n_out])
        return softmax_classifier(X, y, weights, bias)
def logistic_regression(X, y):
    """Creates logistic regression TensorFlow subgraph.

    Args:
        X: tensor or placeholder for input features.
        y: tensor or placeholder for target.

    Returns:
        Predictions and loss tensors.
    """
    with tf.variable_scope('logistic_regression'):
        out_dim = y.get_shape()[1]
        weights = tf.get_variable('weights', [X.get_shape()[1], out_dim])
        bias = tf.get_variable('bias', [out_dim])
        return softmax_classifier(X, y, weights, bias)
def test_softmax_classifier(self):
    """Verifies shapes and the loss of softmax_classifier on a fixed input."""
    with self.test_session() as session:
        x = tf.placeholder(tf.float32, [None, 3])
        t = tf.placeholder(tf.float32, [None, 2])
        weight_matrix = tf.constant([[0.1, 0.1], [0.1, 0.1], [0.1, 0.1]])
        bias_vec = tf.constant([0.2, 0.3])
        class_weights = tf.constant([0.1, 0.9])
        prediction, loss = ops.softmax_classifier(
            x, t, weight_matrix, bias_vec, class_weights)
        self.assertEqual(prediction.get_shape()[1], 2)
        self.assertEqual(loss.get_shape(), [])
        # Precomputed weighted cross-entropy for this single example.
        value = session.run(loss, {x: [[0.2, 0.3, 0.2]], t: [[0, 1]]})
        self.assertAllClose(value, 0.55180627)
def logistic_regression(X, y):
    """Creates logistic regression TensorFlow subgraph.

    Args:
        X: tensor or placeholder for input features,
           shape should be [batch_size, n_features].
        y: tensor or placeholder for target,
           shape should be [batch_size, n_classes].

    Returns:
        Predictions and loss tensors.
    """
    with tf.variable_scope('logistic_regression'):
        # Track input/target distributions in TensorBoard.
        tf.histogram_summary('logistic_regression.X', X)
        tf.histogram_summary('logistic_regression.y', y)
        num_classes = y.get_shape()[-1]
        weights = tf.get_variable('weights', [X.get_shape()[1], num_classes])
        bias = tf.get_variable('bias', [num_classes])
        # Track parameter distributions as well.
        tf.histogram_summary('logistic_regression.weights', weights)
        tf.histogram_summary('logistic_regression.bias', bias)
        return softmax_classifier(X, y, weights, bias)
def logistic_regression(X, y, class_weight=None):
  """Creates logistic regression TensorFlow subgraph.

  Args:
    X: tensor or placeholder for input features,
       shape should be [batch_size, n_features].
    y: tensor or placeholder for target,
       shape should be [batch_size, n_classes].
    class_weight: tensor, [n_classes], where for each class
                  it has weight of the class. If not provided
                  will check if graph contains tensor `class_weight:0`.
                  If that is not provided either all ones are used.

  Returns:
    Predictions and loss tensors.
  """
  with tf.variable_scope('logistic_regression'):
    tf.histogram_summary('logistic_regression.X', X)
    tf.histogram_summary('logistic_regression.y', y)
    weights = tf.get_variable(
        'weights', [X.get_shape()[1], y.get_shape()[-1]])
    bias = tf.get_variable('bias', [y.get_shape()[-1]])
    tf.histogram_summary('logistic_regression.weights', weights)
    tf.histogram_summary('logistic_regression.bias', bias)
    # If no class weight provided, try to retrieve one from a pre-defined
    # tensor name in the graph.
    # FIX: compare against None explicitly — `not class_weight` evaluates
    # bool() on the argument, and bool() of a tf.Tensor raises TypeError,
    # so the original crashed whenever a class_weight tensor was passed.
    if class_weight is None:
      try:
        class_weight = tf.get_default_graph().get_tensor_by_name(
            'class_weight:0')
      except KeyError:
        # Graph has no `class_weight:0` tensor; classes weighted equally.
        pass
    return softmax_classifier(X, y, weights, bias, class_weight=class_weight)