Example 1
  def testProjection(self):
    """Check projection operator."""
    input_dim = 2
    monotonic_output_dim = 2
    non_monotonic_output_dim = 1
    # The first input is monotonic; the second is non-monotonic.
    is_monotone = [True, False]
    input_placeholder = array_ops.placeholder(
        dtype=dtypes.float32, shape=[None, input_dim])
    packed_results = monotone_linear_layers.split_monotone_linear_layer(
        input_placeholder,
        input_dim=input_dim,
        monotonic_output_dim=monotonic_output_dim,
        non_monotonic_output_dim=non_monotonic_output_dim,
        is_monotone=is_monotone,
        init_weight_mean=-10.0,
        init_weight_stddev=0.0)
    (_, monotone_weights, _, non_monotone_weights, proj, _) = packed_results

    expected_pre_monotone_weights = [[-10.0, -10.0]] * 2
    expected_pre_non_monotone_weights = [[-10.0]]
    expected_projected_monotone_weights = [[0.0, -10.0]] * 2
    expected_projected_non_monotone_weights = [[-10.0]]

    with self.test_session() as sess:
      sess.run(variables.global_variables_initializer())
      self.assertAllClose(expected_pre_monotone_weights,
                          monotone_weights.eval())
      self.assertAllClose(expected_pre_non_monotone_weights,
                          non_monotone_weights.eval())
      sess.run(proj)
      self.assertAllClose(expected_projected_monotone_weights,
                          monotone_weights.eval())
      self.assertAllClose(expected_projected_non_monotone_weights,
                          non_monotone_weights.eval())
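The projection this test asserts can be checked outside TensorFlow. Below is
a minimal NumPy sketch, assuming (as the expected values above imply) that
rows index outputs, columns index inputs, and the projection clips
monotonic-input columns to be non-negative while leaving the others alone:

import numpy as np

def project_monotone(weights, is_monotone):
  """Clip the columns flagged as monotonic to be non-negative."""
  weights = np.array(weights, dtype=np.float32)
  mask = np.array(is_monotone)
  weights[:, mask] = np.maximum(weights[:, mask], 0.0)
  return weights

print(project_monotone([[-10.0, -10.0]] * 2, [True, False]))
# [[  0. -10.]
#  [  0. -10.]]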
Example 2
 def testNoNonMonotonicInputsWithNonMonotonicOutputExpectFailure(self):
   input_dim = 2
   monotonic_output_dim = 2
   non_monotonic_output_dim = 2
   is_monotone = [True, True]
   input_placeholder = array_ops.placeholder(
       dtype=dtypes.float32, shape=[None, input_dim])
   with self.assertRaises(ValueError):
     _ = monotone_linear_layers.split_monotone_linear_layer(
         input_placeholder,
         input_dim=input_dim,
         monotonic_output_dim=monotonic_output_dim,
         non_monotonic_output_dim=non_monotonic_output_dim,
         is_monotone=is_monotone,
         init_weight_mean=-10.0,
         init_weight_stddev=0.0)
Example 3
 def testBooleanIsMonotoneExpectsError(self):
   """Test empty non monotonic output."""
   input_dim = 2
   monotonic_output_dim = 2
   non_monotonic_output_dim = 1
   is_monotone = True
   input_placeholder = array_ops.placeholder(
       dtype=dtypes.float32, shape=[None, input_dim])
   with self.assertRaises(ValueError):
     _ = monotone_linear_layers.split_monotone_linear_layer(
         input_placeholder,
         input_dim=input_dim,
         monotonic_output_dim=monotonic_output_dim,
         non_monotonic_output_dim=non_monotonic_output_dim,
         is_monotone=is_monotone,
         init_weight_mean=-10.0,
         init_weight_stddev=0.0)
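Examples 2 and 3 both pin down argument validation. A hedged sketch of the
checks involved (not the library's actual code, just the contract the two
failure tests above imply):

def check_split_args(is_monotone, non_monotonic_output_dim):
  # is_monotone must be a list with one flag per input, not a single bool.
  if not isinstance(is_monotone, list):
    raise ValueError('is_monotone must be a list of booleans.')
  # When every input is monotonic there is nothing to feed a non-monotonic
  # output head, so requesting one must fail.
  if all(is_monotone) and non_monotonic_output_dim > 0:
    raise ValueError('All inputs are monotonic but '
                     'non_monotonic_output_dim is not zero.')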
Example 4
  def testEvaluation(self):
    """Create a split monotone linear layer and check the results."""
    batch_size = 5
    input_dim = 10
    monotonic_output_dim = 2
    non_monotonic_output_dim = 3
    # The first five inputs are monotonic; the last five are non-monotonic.
    is_monotone = [True] * 5 + [False] * 5
    input_placeholder = array_ops.placeholder(
        dtype=dtypes.float32, shape=[batch_size, input_dim])
    packed_results = monotone_linear_layers.split_monotone_linear_layer(
        input_placeholder,
        input_dim=input_dim,
        monotonic_output_dim=monotonic_output_dim,
        non_monotonic_output_dim=non_monotonic_output_dim,
        is_monotone=is_monotone)
    (monotonic_output, _, non_monotonic_output, _, _, _) = packed_results

    # Check the shape of outputs.
    self.assertAllEqual(monotonic_output.shape,
                        [batch_size, monotonic_output_dim])
    self.assertAllEqual(non_monotonic_output.shape,
                        [batch_size, non_monotonic_output_dim])

    # Check that monotonic inputs are not part of non_monotonic_output.
    # We do this by changing the monotonic half of the inputs and checking
    # whether the value changes.
    zero_input = [[0.0] * 10] * 5
    identity_in_monotone_inputs = [
        [1.0, 0.0, 0.0, 0.0, 0.0] + [0.0] * 5,
        [0.0, 1.0, 0.0, 0.0, 0.0] + [0.0] * 5,
        [0.0, 0.0, 1.0, 0.0, 0.0] + [0.0] * 5,
        [0.0, 0.0, 0.0, 1.0, 0.0] + [0.0] * 5,
        [0.0, 0.0, 0.0, 0.0, 1.0] + [0.0] * 5,
    ]

    with self.test_session() as sess:
      sess.run(variables.global_variables_initializer())
      non_monotonic_output_at_zero = sess.run(
          non_monotonic_output, feed_dict={input_placeholder: zero_input})
      non_monotonic_output_at_identity = sess.run(
          non_monotonic_output,
          feed_dict={input_placeholder: identity_in_monotone_inputs})

    self.assertAllClose(non_monotonic_output_at_zero,
                        non_monotonic_output_at_identity)
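Why the final assertion holds: the split layer feeds its non-monotonic head
only the input columns where is_monotone is False. A NumPy sketch of that
assumed routing, showing that perturbing the monotonic columns cannot move
the non-monotonic output:

import numpy as np

is_monotone = np.array([True] * 5 + [False] * 5)
x = np.random.randn(5, 10).astype(np.float32)
w = np.random.randn(5, 3)  # five non-monotonic inputs -> three outputs

non_mono_out = x[:, ~is_monotone].dot(w)  # uses only the last five columns
x_perturbed = x.copy()
x_perturbed[:, is_monotone] += 1.0        # touch the monotonic columns only
assert np.allclose(non_mono_out, x_perturbed[:, ~is_monotone].dot(w))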
Example 5
 def testNoRegularizationExpectsNone(self):
   """Create a split monotone linear layer and check no regularization."""
   input_dim = 2
   monotonic_output_dim = 2
   non_monotonic_output_dim = 2
   is_monotone = [True, False]
   input_placeholder = array_ops.placeholder(
       dtype=dtypes.float32, shape=[None, input_dim])
   # We set init_weight_mean to -10.0.
   (_, _, _, _, _,
    regularization) = monotone_linear_layers.split_monotone_linear_layer(
        input_placeholder,
        input_dim=input_dim,
        monotonic_output_dim=monotonic_output_dim,
        non_monotonic_output_dim=non_monotonic_output_dim,
        is_monotone=is_monotone,
        init_weight_mean=-10.0,
        init_weight_stddev=0.0)
   self.assertIsNone(regularization)
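The None contract matters downstream: the ensemble code below accumulates
regularizers with tools.add_if_not_none, so a layer with no regularization
reports None rather than a zero tensor. A hedged sketch of what such a
helper presumably does (its real implementation lives in tools):

def add_if_not_none(a, b):
  """Return a + b, treating None as 'no term at all'."""
  if a is None:
    return b
  if b is None:
    return a
  return a + b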
Example 6
 def testZeroNonMonotonicOutputExpectEmptyNonMonotonicOutput(self):
   """Test empty non monotonic output."""
   input_dim = 2
   monotonic_output_dim = 2
   non_monotonic_output_dim = 0
   is_monotone = [True, True]
   input_placeholder = array_ops.placeholder(
       dtype=dtypes.float32, shape=[None, input_dim])
   packed_results = monotone_linear_layers.split_monotone_linear_layer(
       input_placeholder,
       input_dim=input_dim,
       monotonic_output_dim=monotonic_output_dim,
       non_monotonic_output_dim=non_monotonic_output_dim,
       is_monotone=is_monotone,
       init_weight_mean=-10.0,
       init_weight_stddev=0.0)
   (_, _, non_monotonic_outputs, non_monotonic_weights, _, _) = packed_results
   self.assertIsNone(non_monotonic_outputs)
   self.assertIsNone(non_monotonic_weights)
Example 7
 def testRegularization(self):
   """Create a split monotone linear layer and check regularization."""
   input_dim = 2
   monotonic_output_dim = 2
   non_monotonic_output_dim = 2
   is_monotone = [True, False]
   input_placeholder = array_ops.placeholder(
       dtype=dtypes.float32, shape=[None, input_dim])
   # We set init_weight_mean to -10.0.
   (_, _, _, _, _,
    regularization) = monotone_linear_layers.split_monotone_linear_layer(
        input_placeholder,
        input_dim=input_dim,
        monotonic_output_dim=monotonic_output_dim,
        non_monotonic_output_dim=non_monotonic_output_dim,
        is_monotone=is_monotone,
        init_weight_mean=-10.0,
        init_weight_stddev=0.0,
        l1_reg=0.1,
        l2_reg=0.1)
   with self.test_session() as sess:
     sess.run(variables.global_variables_initializer())
     self.assertAlmostEqual(66.0, sess.run(regularization), delta=1e-5)
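Where 66.0 comes from, assuming the biases start at zero and the
zero-stddev init fills every weight with -10.0: the monotone matrix has
2 x 2 = 4 entries and the non-monotone matrix has 2 outputs x 1
non-monotonic input = 2 entries, six weights in total.

num_weights = 2 * 2 + 2 * 1            # six entries, all equal to -10.0
l1 = 0.1 * num_weights * abs(-10.0)    # 0.1 * 60  = 6.0
l2 = 0.1 * num_weights * (-10.0) ** 2  # 0.1 * 600 = 60.0
print(l1 + l2)                         # 66.0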
Example 8
def _embedded_lattices(calibrated_input_tensor,
                       input_dim,
                       output_dim,
                       interpolation_type,
                       monotonic_num_lattices,
                       monotonic_lattice_rank,
                       monotonic_lattice_size,
                       non_monotonic_num_lattices,
                       non_monotonic_lattice_rank,
                       non_monotonic_lattice_size,
                       linear_embedding_calibration_min,
                       linear_embedding_calibration_max,
                       linear_embedding_calibration_num_keypoints,
                       is_monotone=None,
                       lattice_l1_reg=None,
                       lattice_l2_reg=None,
                       lattice_l1_torsion_reg=None,
                       lattice_l2_torsion_reg=None,
                       lattice_l1_laplacian_reg=None,
                       lattice_l2_laplacian_reg=None):
    """Creates an ensemble of lattices with a linear embedding.

  This function constructs the following deep lattice network:
  calibrated_input -> linear_embedding -> calibration -> ensemble of lattices.
  Then ensemble of lattices' output are averaged and bias term is added to make
  a final prediction.

  ensemble of lattices is consists of two parts: monotonic lattices and
  non-monotonic lattices. The input to the monotonic lattices is an output of
  linear_embedding that contains both monotonic and non-monotonic
  calibrated_input. All inputs to the monotonic lattices are set to be monotonic
  to preserve end-to-end monotonicity in the monotonic feature.
  The input to the non-monotonic lattices is an output of linear_embedding that
  only contains non-monotonic calibrated_input. All inputs to the non-monotonic
  lattices are set to be non-monotonic, since we do not need to guarantee
  monotonicity.

  Args:
    calibrated_input_tensor: [batch_size, input_dim] tensor.
    input_dim: (int) input dimnension.
    output_dim: (int) output dimension.
    interpolation_type: defines whether the lattice will interpolate using the
      full hypercube or only the simplex ("hyper-triangle") around the point
      being evaluated. Valid values: 'hypercube' or 'simplex'
    monotonic_num_lattices: (int) number of monotonic lattices in the ensemble
      lattices layer.
    monotonic_lattice_rank: (int) number of inputs to each monotonic lattice in
      the ensemble lattices layer.
    monotonic_lattice_size: (int) lattice cell size for each monotonic lattice
      in the ensemble lattices layer.
    non_monotonic_num_lattices: (int) number of non monotonic lattices in the
      ensemble lattices layer.
    non_monotonic_lattice_rank: (int) number of inputs to each non monotonic
      lattice in the ensemble lattices layer.
    non_monotonic_lattice_size: (int) lattice cell size for each non monotonic
      lattice in the ensemble lattices layer.
    linear_embedding_calibration_min: (float) a minimum input keypoints value
      for linear_embedding calibration.
    linear_embedding_calibration_max: (float) a maximum input keypoints value
      for linear_embedding calibration.
    linear_embedding_calibration_num_keypoints: (int) a number of eypoints for
      linear_embedding calibration.
    is_monotone: (bool, list of booleans) is_monotone[k] == true then
      calibrated_input_tensor[:, k] is considered to be a monotonic input.
    lattice_l1_reg: (float) lattice l1 regularization amount.
    lattice_l2_reg: (float) lattice l2 regularization amount.
    lattice_l1_torsion_reg: (float) lattice l1 torsion regularization amount.
    lattice_l2_torsion_reg: (float) lattice l2 torsion regularization amount.
    lattice_l1_laplacian_reg: (float) lattice l1 laplacian regularization
      amount.
    lattice_l2_laplacian_reg: (float) lattice l2 laplacian regularization
      amount.
  Returns:
    A tuple of (output_tensor, projection_ops, regularization).
  Raises:
    ValueError: If there is no non-monotonic inputs but
    non_monotonic_num_lattices is not zero.
  """
    projections = []
    regularization = None

    # Explicitly assign the number of lattices to zero for any empty case,
    # so the embedding dimensions below reduce to zero as well.
    if not monotonic_num_lattices:
        monotonic_num_lattices = 0
    if not non_monotonic_num_lattices:
        non_monotonic_num_lattices = 0

    # Step 1. Create a linear embedding.
    monotonic_embedding_dim = monotonic_num_lattices * monotonic_lattice_rank
    non_monotonic_embedding_dim = (non_monotonic_num_lattices *
                                   non_monotonic_lattice_rank)

    if is_monotone is not None:
        is_monotone = tools.cast_to_list(is_monotone, input_dim, 'is_monotone')
    with variable_scope.variable_scope('linear_embedding'):
        packed_results = monotone_linear_layers.split_monotone_linear_layer(
            calibrated_input_tensor,
            input_dim,
            monotonic_embedding_dim,
            non_monotonic_embedding_dim,
            is_monotone=is_monotone)
        (monotonic_output, _, non_monotonic_output, _, proj,
         _) = packed_results
        if proj is not None:
            projections.append(proj)

    # Step 2. Create ensemble of monotonic lattices.
    if monotonic_num_lattices == 0:
        m_lattice_outputs = None
    else:
        with variable_scope.variable_scope('monotonic_lattices'):
            m_lattice_outputs, projs, reg = _ensemble_lattices_layer(
                monotonic_output,
                monotonic_embedding_dim,
                output_dim,
                interpolation_type,
                linear_embedding_calibration_min,
                linear_embedding_calibration_max,
                linear_embedding_calibration_num_keypoints,
                monotonic_num_lattices,
                monotonic_lattice_rank,
                monotonic_lattice_size,
                is_monotone=True,
                l1_reg=lattice_l1_reg,
                l2_reg=lattice_l2_reg,
                l1_torsion_reg=lattice_l1_torsion_reg,
                l2_torsion_reg=lattice_l2_torsion_reg,
                l1_laplacian_reg=lattice_l1_laplacian_reg,
                l2_laplacian_reg=lattice_l2_laplacian_reg)
            if projs:
                projections += projs
            regularization = tools.add_if_not_none(regularization, reg)

    # Step 3. Construct non-monotonic ensembles.
    if non_monotonic_output is None and non_monotonic_num_lattices > 0:
        raise ValueError(
            'All input signals are monotonic but the number of non monotonic '
            'lattices is not zero.')
    if non_monotonic_num_lattices == 0:
        n_lattice_outputs = None
    else:
        with variable_scope.variable_scope('non_monotonic_lattices'):
            n_lattice_outputs, projs, reg = _ensemble_lattices_layer(
                non_monotonic_output,
                non_monotonic_embedding_dim,
                output_dim,
                interpolation_type,
                linear_embedding_calibration_min,
                linear_embedding_calibration_max,
                linear_embedding_calibration_num_keypoints,
                non_monotonic_num_lattices,
                non_monotonic_lattice_rank,
                non_monotonic_lattice_size,
                is_monotone=False,
                l1_reg=lattice_l1_reg,
                l2_reg=lattice_l2_reg,
                l1_torsion_reg=lattice_l1_torsion_reg,
                l2_torsion_reg=lattice_l2_torsion_reg,
                l1_laplacian_reg=lattice_l1_laplacian_reg,
                l2_laplacian_reg=lattice_l2_laplacian_reg)
            if projs:
                projections += projs
            regularization = tools.add_if_not_none(regularization, reg)

    # Step 4. Take average to make a final prediction.
    with variable_scope.variable_scope('ensemble_average'):
        output = variable_scope.get_variable(
            name='ensemble_bias',
            initializer=[0.0] * output_dim,
            dtype=calibrated_input_tensor.dtype)
        if m_lattice_outputs is not None:
            output += math_ops.divide(math_ops.add_n(m_lattice_outputs),
                                      monotonic_num_lattices)
        if n_lattice_outputs is not None:
            output += math_ops.divide(math_ops.add_n(n_lattice_outputs),
                                      non_monotonic_num_lattices)

    return (output, projections, regularization)
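For orientation, a hedged usage sketch with hypothetical argument values
(_embedded_lattices is a private helper, so the real call site lives
elsewhere in the library):

output, projections, regularization = _embedded_lattices(
    calibrated_input,                 # a [batch_size, 4] float32 tensor
    input_dim=4,
    output_dim=1,
    interpolation_type='hypercube',
    monotonic_num_lattices=2,
    monotonic_lattice_rank=2,
    monotonic_lattice_size=2,
    non_monotonic_num_lattices=2,
    non_monotonic_lattice_rank=2,
    non_monotonic_lattice_size=2,
    linear_embedding_calibration_min=0.0,
    linear_embedding_calibration_max=1.0,
    linear_embedding_calibration_num_keypoints=10,
    is_monotone=[True, True, False, False])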