Example #1
0
def linear_regression(x, y, init_mean=None, init_stddev=1.0):
    """Creates linear regression TensorFlow subgraph.

    Args:
      x: tensor or placeholder for input features.
      y: tensor or placeholder for target.
      init_mean: the mean value to use for initialization.
      init_stddev: the standard deviation to use for initialization.

    Returns:
      Predictions and loss tensors.

    Side effects:
      The variables linear_regression.weights and linear_regression.bias are
      initialized as follows.  If init_mean is not None, then initialization
      will be done using a random normal initializer with the given init_mean
      and init_stddev.  (These may be set to 0.0 each if a zero initialization
      is desirable for convex use cases.)  If init_mean is None, then the
      default uniform_unit_scaling_initializer will be used.
    """
    with vs.variable_scope('linear_regression'):
        scope_name = vs.get_variable_scope().name
        # Record input/target distributions for TensorBoard inspection.
        logging_ops.histogram_summary('%s.x' % scope_name, x)
        logging_ops.histogram_summary('%s.y' % scope_name, y)
        dtype = x.dtype.base_dtype
        y_shape = y.get_shape()
        # A rank-1 target means a single output; otherwise use the second
        # dimension of y as the output width.
        if len(y_shape) == 1:
            output_shape = 1
        else:
            output_shape = y_shape[1]
        # Set up the requested initialization.
        if init_mean is None:
            # No initializer given: rely on get_variable's default
            # (uniform unit scaling) initialization.
            weights = vs.get_variable('weights',
                                      [x.get_shape()[1], output_shape],
                                      dtype=dtype)
            bias = vs.get_variable('bias', [output_shape], dtype=dtype)
        else:
            weights = vs.get_variable(
                'weights', [x.get_shape()[1], output_shape],
                initializer=init_ops.random_normal_initializer(init_mean,
                                                               init_stddev,
                                                               dtype=dtype),
                dtype=dtype)
            bias = vs.get_variable(
                'bias', [output_shape],
                initializer=init_ops.random_normal_initializer(init_mean,
                                                               init_stddev,
                                                               dtype=dtype),
                dtype=dtype)
        logging_ops.histogram_summary('%s.weights' % scope_name, weights)
        logging_ops.histogram_summary('%s.bias' % scope_name, bias)
        return losses_ops.mean_squared_error_regressor(x, y, weights, bias)
Example #2
0
def linear_regression(x, y, init_mean=None, init_stddev=1.0):
  """Creates linear regression TensorFlow subgraph.

  Args:
    x: tensor or placeholder for input features.
    y: tensor or placeholder for labels.
    init_mean: the mean value to use for initialization.
    init_stddev: the standard deviation to use for initialization.

  Returns:
    Predictions and loss tensors.

  Side effects:
    The variables linear_regression.weights and linear_regression.bias are
    initialized as follows.  If init_mean is not None, then initialization
    will be done using a random normal initializer with the given init_mean
    and init_stddev.  (These may be set to 0.0 each if a zero initialization
    is desirable for convex use cases.)  If init_mean is None, then the
    default uniform_unit_scaling_initializer will be used.
  """
  with vs.variable_scope('linear_regression'):
    scope_name = vs.get_variable_scope().name
    # Record input/label distributions for TensorBoard inspection.
    summary.histogram('%s.x' % scope_name, x)
    summary.histogram('%s.y' % scope_name, y)
    dtype = x.dtype.base_dtype
    y_shape = y.get_shape()
    # A rank-1 label tensor means a single output; otherwise the second
    # dimension of y gives the output width.
    if len(y_shape) == 1:
      output_shape = 1
    else:
      output_shape = y_shape[1]
    # Set up the requested initialization.
    if init_mean is None:
      # No initializer given: rely on get_variable's default
      # (uniform unit scaling) initialization.
      weights = vs.get_variable(
          'weights', [x.get_shape()[1], output_shape], dtype=dtype)
      bias = vs.get_variable('bias', [output_shape], dtype=dtype)
    else:
      weights = vs.get_variable(
          'weights', [x.get_shape()[1], output_shape],
          initializer=init_ops.random_normal_initializer(
              init_mean, init_stddev, dtype=dtype),
          dtype=dtype)
      bias = vs.get_variable(
          'bias', [output_shape],
          initializer=init_ops.random_normal_initializer(
              init_mean, init_stddev, dtype=dtype),
          dtype=dtype)
    summary.histogram('%s.weights' % scope_name, weights)
    summary.histogram('%s.bias' % scope_name, bias)
    return losses_ops.mean_squared_error_regressor(x, y, weights, bias)
Example #3
0
def linear_regression(X, y, init_mean=None, init_stddev=1.0):
    """Creates linear regression TensorFlow subgraph.

    Args:
        X: tensor or placeholder for input features.
        y: tensor or placeholder for target.
        init_mean: the mean value to use for initialization.
        init_stddev: the standard deviation to use for initialization.

    Returns:
        Predictions and loss tensors.

    Side effects:
        The variables linear_regression.weights and linear_regression.bias are
        initialized as follows.  If init_mean is not None, then initialization
        will be done using a random normal initializer with the given init_mean
        and init_stddev.  (These may be set to 0.0 each if a zero initialization
        is desirable for convex use cases.)  If init_mean is None, then the
        default uniform_unit_scaling_initializer will be used.
    """
    with vs.variable_scope('linear_regression'):
        # Record input/target distributions for TensorBoard inspection.
        logging_ops.histogram_summary('linear_regression.X', X)
        logging_ops.histogram_summary('linear_regression.y', y)
        y_shape = y.get_shape()
        # A rank-1 target means a single output; otherwise the second
        # dimension of y gives the output width.
        if len(y_shape) == 1:
            output_shape = 1
        else:
            output_shape = y_shape[1]
        # Set up the requested initialization.  NOTE(review): unlike the
        # other variants, no dtype is passed here, so the variables use
        # get_variable's default dtype.
        if init_mean is None:
            # No initializer given: rely on get_variable's default
            # (uniform unit scaling) initialization.
            weights = vs.get_variable('weights',
                                      [X.get_shape()[1], output_shape])
            bias = vs.get_variable('bias',
                                   [output_shape])
        else:
            weights = vs.get_variable('weights',
                                      [X.get_shape()[1], output_shape],
                                      initializer=init_ops.random_normal_initializer(
                                          init_mean, init_stddev))
            bias = vs.get_variable('bias',
                                   [output_shape],
                                   initializer=init_ops.random_normal_initializer(
                                       init_mean, init_stddev))
        logging_ops.histogram_summary('linear_regression.weights', weights)
        logging_ops.histogram_summary('linear_regression.bias', bias)
        return losses_ops.mean_squared_error_regressor(X, y, weights, bias)