def logistic_regression(X, y, class_weight=None, init_mean=None, init_stddev=1.0):
  """Creates logistic regression TensorFlow subgraph.

  Args:
    X: tensor or placeholder for input features,
       shape should be [batch_size, n_features].
    y: tensor or placeholder for target,
       shape should be [batch_size, n_classes].
    class_weight: tensor, [n_classes], where for each class
                  it has weight of the class. If not provided
                  will check if graph contains tensor `class_weight:0`.
                  If that is not provided either all ones are used.
    init_mean: the mean value to use for initialization.
    init_stddev: the standard deviation to use for initialization.

  Returns:
    Predictions and loss tensors.

  Side effects:
    The variables logistic_regression.weights and logistic_regression.bias
    are initialized as follows. If init_mean is not None, then initialization
    will be done using a random normal initializer with the given init_mean
    and init_stddev. (These may be set to 0.0 each if a zero initialization
    is desirable for convex use cases.) If init_mean is None, then the
    uniform_unit_scaling_initializer will be used.
  """
  with vs.variable_scope('logistic_regression'):
    logging_ops.histogram_summary('logistic_regression.X', X)
    logging_ops.histogram_summary('logistic_regression.y', y)
    # Set up the requested initialization.
    if init_mean is None:
      # No explicit initializer: get_variable falls back to the scope's
      # default (uniform unit scaling).
      weights = vs.get_variable('weights',
                                [X.get_shape()[1], y.get_shape()[-1]])
      bias = vs.get_variable('bias', [y.get_shape()[-1]])
    else:
      weights = vs.get_variable(
          'weights', [X.get_shape()[1], y.get_shape()[-1]],
          initializer=init_ops.random_normal_initializer(
              init_mean, init_stddev))
      bias = vs.get_variable(
          'bias', [y.get_shape()[-1]],
          initializer=init_ops.random_normal_initializer(
              init_mean, init_stddev))
    logging_ops.histogram_summary('logistic_regression.weights', weights)
    logging_ops.histogram_summary('logistic_regression.bias', bias)
    # If no class weight provided, try to retrieve one from pre-defined
    # tensor name in the graph.
    # NOTE: must compare against None explicitly -- `if not class_weight`
    # raises TypeError when class_weight is a Tensor, because the truth
    # value of a Tensor is undefined in graph mode.
    if class_weight is None:
      try:
        class_weight = ops.get_default_graph().get_tensor_by_name(
            'class_weight:0')
      except KeyError:
        # Graph has no `class_weight:0` tensor; fall through with None,
        # which softmax_classifier treats as uniform (all-ones) weights.
        pass
    return losses_ops.softmax_classifier(X, y, weights, bias,
                                         class_weight=class_weight)
def logistic_regression(x, y, class_weight=None, init_mean=None, init_stddev=1.0):
  """Creates logistic regression TensorFlow subgraph.

  Args:
    x: tensor or placeholder for input features,
       shape should be [batch_size, n_features].
    y: tensor or placeholder for target,
       shape should be [batch_size, n_classes].
    class_weight: tensor, [n_classes], where for each class
                  it has weight of the class. If not provided
                  will check if graph contains tensor `class_weight:0`.
                  If that is not provided either all ones are used.
    init_mean: the mean value to use for initialization.
    init_stddev: the standard deviation to use for initialization.

  Returns:
    Predictions and loss tensors.

  Side effects:
    The variables logistic_regression.weights and logistic_regression.bias
    are initialized as follows. If init_mean is not None, then initialization
    will be done using a random normal initializer with the given init_mean
    and init_stddev. (These may be set to 0.0 each if a zero initialization
    is desirable for convex use cases.) If init_mean is None, then the
    uniform_unit_scaling_initializer will be used.
  """
  with vs.variable_scope('logistic_regression'):
    scope_name = vs.get_variable_scope().name
    logging_ops.histogram_summary('%s.x' % scope_name, x)
    logging_ops.histogram_summary('%s.y' % scope_name, y)
    # Create variables with the same dtype as the input so the matmul/add
    # in the classifier do not require casts.
    dtype = x.dtype.base_dtype
    # Set up the requested initialization.
    if init_mean is None:
      # No explicit initializer: get_variable falls back to the scope's
      # default (uniform unit scaling).
      weights = vs.get_variable(
          'weights', [x.get_shape()[1], y.get_shape()[-1]], dtype=dtype)
      bias = vs.get_variable('bias', [y.get_shape()[-1]], dtype=dtype)
    else:
      weights = vs.get_variable(
          'weights', [x.get_shape()[1], y.get_shape()[-1]],
          initializer=init_ops.random_normal_initializer(init_mean,
                                                         init_stddev,
                                                         dtype=dtype),
          dtype=dtype)
      bias = vs.get_variable(
          'bias', [y.get_shape()[-1]],
          initializer=init_ops.random_normal_initializer(init_mean,
                                                         init_stddev,
                                                         dtype=dtype),
          dtype=dtype)
    logging_ops.histogram_summary('%s.weights' % scope_name, weights)
    logging_ops.histogram_summary('%s.bias' % scope_name, bias)
    # If no class weight provided, try to retrieve one from pre-defined
    # tensor name in the graph.
    # NOTE: must compare against None explicitly -- `if not class_weight`
    # raises TypeError when class_weight is a Tensor, because the truth
    # value of a Tensor is undefined in graph mode.
    if class_weight is None:
      try:
        class_weight = ops.get_default_graph().get_tensor_by_name(
            'class_weight:0')
      except KeyError:
        # Graph has no `class_weight:0` tensor; fall through with None,
        # which softmax_classifier treats as uniform (all-ones) weights.
        pass
    return losses_ops.softmax_classifier(x, y, weights, bias,
                                         class_weight=class_weight)