def _CalibratedRtlRegressor(self,
                            feature_names,
                            feature_columns,
                            num_lattices=1,
                            lattice_rank=1,
                            num_keypoints=_NUM_KEYPOINTS,
                            weight_column=None,
                            **hparams_args):
    """Builds a calibrated RTL regressor with uniform keypoint init.

    Extra keyword arguments are forwarded to CalibratedRtlHParams.
    """

    def _keypoints_init():
        # Uniform keypoints over input range [-1, 1], output range [0, 1].
        return keypoints_initialization.uniform_keypoints_for_signal(
            num_keypoints, -1., 1., 0., 1.)

    hparams = tfl_hparams.CalibratedRtlHParams(
        feature_names,
        num_keypoints=num_keypoints,
        num_lattices=num_lattices,
        lattice_rank=lattice_rank,
        **hparams_args)
    # Turn off monotonic calibrator; use a fixed learning rate.
    hparams.set_param('calibration_monotonic', None)
    hparams.set_param('learning_rate', 0.1)

    return calibrated_rtl.calibrated_rtl_regressor(
        feature_columns=feature_columns,
        weight_column=weight_column,
        hparams=hparams,
        keypoints_initializers_fn=_keypoints_init)
def __init__(self,
             n_classes,
             feature_columns=None,
             model_dir=None,
             quantiles_dir=None,
             keypoints_initializers_fn=None,
             lattice_initializers_fn=None,
             optimizer=None,
             config=None,
             hparams=None,
             head=None,
             weight_column=None):
    """Construct CalibrateRtlClassifier/Regressor.

    Validates and adjusts the given hparams, then delegates the bulk of the
    setup to the base calibrated-model constructor with model type 'rtl'.
    """
    # Fall back to default hyper-parameters (no per-feature settings) when
    # the caller supplies none.
    if not hparams:
        hparams = tfl_hparams.CalibratedRtlHParams([])
    self.check_hparams(hparams)
    hparams = self._adjust_calibration_params(hparams)
    # Saved for later use when the lattice layers are built.
    self.lattice_initializers_fn_ = lattice_initializers_fn
    # NOTE: the base __init__ takes these positionally — order matters.
    super(_CalibratedRtl, self).__init__(
        n_classes, feature_columns, model_dir, quantiles_dir,
        keypoints_initializers_fn, optimizer, config, hparams, head,
        weight_column, 'rtl')
    # File where the random lattice ensemble structure is persisted.
    self._structure_file = os.path.join(self._model_dir, _RTL_STRUCTURE_FILE)
def testCalibratedRtlMonotonicClassifierTraining(self):
    """Monotonic RTL classifier recovers logical-OR despite a noisy label."""
    # Training data contains one noisy sample ([1., 1.] -> 0.0), while the
    # test labels follow the logical-OR function exactly:
    #
    #   Training: (x, y)            Test: (x, y)
    #   ([0., 0.], 0.0)             ([0., 0.], 0.0)
    #   ([0., 1.], 1.0)             ([0., 1.], 1.0)
    #   ([1., 0.], 1.0)             ([1., 0.], 1.0)
    #   ([1., 1.], 0.0)  <- noisy   ([1., 1.], 1.0)
    #
    # By enforcing increasing monotonicity on all features, the model should
    # override the noisy sample and do well on the test examples.
    x_samples = {
        'x0': np.array([0.0, 0.0, 1.0, 1.0]),
        'x1': np.array([0.0, 1.0, 0.0, 1.0]),
    }
    training_y = np.array([[False], [True], [True], [False]])
    test_y = np.array([[False], [True], [True], [True]])

    train_input_fn = numpy_io.numpy_input_fn(
        x=x_samples, y=training_y, batch_size=4, num_epochs=1000,
        shuffle=False)
    test_input_fn = numpy_io.numpy_input_fn(
        x=x_samples, y=test_y, shuffle=False)

    # Define monotonic lattice classifier over both features.
    feature_columns = [
        feature_column_lib.numeric_column(name) for name in ('x0', 'x1')
    ]

    def _keypoints_init():
        return keypoints_initialization.uniform_keypoints_for_signal(
            2, 0., 1., 0., 1.)

    hparams = tfl_hparams.CalibratedRtlHParams(
        num_keypoints=2, num_lattices=3, lattice_rank=2)
    # Monotonic calibrated lattice: every feature increases.
    hparams.set_param('monotonicity', +1)
    hparams.set_param('learning_rate', 0.1)
    hparams.set_param('interpolation_type', 'hypercube')

    estimator = calibrated_rtl.calibrated_rtl_classifier(
        feature_columns=feature_columns,
        hparams=hparams,
        keypoints_initializers_fn=_keypoints_init)
    estimator.train(input_fn=train_input_fn)
    results = estimator.evaluate(input_fn=test_input_fn)
    # We should expect 1.0 accuracy.
    self.assertGreater(results['accuracy'], 0.999)
def setUp(self):
    """Builds a minimal RTL classifier fixture with fixed hyper-parameters."""
    self.hparams = tfl_hparams.CalibratedRtlHParams(feature_names=['x'])
    # Apply the fixed structure/calibration settings in one pass.
    for name, value in (('lattice_size', 2),
                        ('calibrator_output_min', 0),
                        ('calibrator_output_max', 1),
                        ('calibration_bound', True),
                        ('lattice_rank', 2),
                        ('num_lattices', 10)):
        self.hparams.set_param(name, value)
    self.empty_estimator = calibrated_rtl.calibrated_rtl_classifier(
        hparams=self.hparams)
def testConstructorsAllTypes(self):
    """Smoke-tests every hparams constructor and boolean parsing."""
    _ = hparams.CalibratedHParams(['x0', 'x1'])
    # The derived hparams classes all accept the same constructor shape.
    for klass in (hparams.CalibratedLinearHParams,
                  hparams.CalibratedLatticeHParams,
                  hparams.CalibratedRtlHParams):
        _ = klass(['x0', 'x1'], learning_rate=0.1)

    etl = hparams.CalibratedEtlHParams(['x0', 'x1'], learning_rate=0.1)
    # Boolean params parse common truthy/falsy spellings...
    etl.parse('calibration_bound=yes')
    self.assertTrue(etl.calibration_bound)
    etl.parse('calibration_bound=off')
    self.assertFalse(etl.calibration_bound)
    # ...and reject anything else.
    with self.assertRaises(ValueError):
        etl.parse('calibration_bound=foobar')
def testCalibratedRtlWithMissingTraining(self):
    """RTL regressor trains well when inputs can be missing (-1 sentinel)."""
    # x0 is missing with its own vertex, so its missing value can take a
    # very different output; x1 is missing and calibrated, in this case to
    # the middle of the lattice.
    x0 = np.array([0., 0., 1., 1., -1., -1., 0., 1.])
    x1 = np.array([0., 1., 0., 1., 0., 1., -1., -1.])
    training_y = np.array([1., 3., 7., 11., 23., 27., 2., 9.])
    x_samples = {'x0': x0, 'x1': x1}

    train_input_fn = numpy_io.numpy_input_fn(
        x=x_samples, y=training_y, batch_size=x0.shape[0], num_epochs=2000,
        shuffle=False)
    test_input_fn = numpy_io.numpy_input_fn(
        x=x_samples, y=training_y, shuffle=False)

    feature_columns = [
        feature_column_lib.numeric_column(name) for name in ('x0', 'x1')
    ]

    def _keypoints_init():
        return keypoints_initialization.uniform_keypoints_for_signal(
            2, 0., 1., 0., 1.)

    hparams = tfl_hparams.CalibratedRtlHParams(
        ['x0', 'x1'],
        num_keypoints=2,
        num_lattices=3,
        lattice_rank=2,
        learning_rate=0.1,
        missing_input_value=-1.)
    # Only x0 receives a dedicated lattice vertex for its missing value.
    hparams.set_feature_param('x0', 'missing_vertex', True)

    estimator = calibrated_rtl.calibrated_rtl_regressor(
        feature_columns=feature_columns,
        hparams=hparams,
        keypoints_initializers_fn=_keypoints_init)
    estimator.train(input_fn=train_input_fn)
    results = estimator.evaluate(input_fn=test_input_fn)
    self.assertLess(results['average_loss'], 0.1)
def _CalibratedRtlClassifier(self,
                             feature_columns,
                             num_lattices=1,
                             lattice_rank=1,
                             **hparams_args):
    """Builds a separately-calibrated RTL classifier for the tests.

    Extra keyword arguments are forwarded to CalibratedRtlHParams.
    """

    def _keypoints_init():
        # Uniform keypoints over input range [-1, 1], output range [0, 1].
        return keypoints_initialization.uniform_keypoints_for_signal(
            _NUM_KEYPOINTS, -1., 1., 0., 1.)

    hparams = tfl_hparams.CalibratedRtlHParams(
        num_keypoints=_NUM_KEYPOINTS,
        num_lattices=num_lattices,
        lattice_rank=lattice_rank,
        **hparams_args)
    # Turn off monotonic calibrator; use a fixed learning rate.
    hparams.set_param('calibration_monotonic', None)
    hparams.set_param('learning_rate', 0.1)

    return scrtl.separately_calibrated_rtl_classifier(
        feature_columns=feature_columns,
        hparams=hparams,
        keypoints_initializers_fn=_keypoints_init)