def testEmptyNonMonotonicLatticeSizeExpectsError(self):
  hparams = tfl_hparams.CalibratedEtlHParams(feature_names=['x'])
  hparams.set_param('non_monotonic_num_lattices', 2)
  hparams.set_param('non_monotonic_lattice_rank', 2)
  with self.assertRaisesRegexp(
      ValueError,
      'Hyperparameter configuration cannot be used in the calibrated etl '
      'estimator.'):
    calibrated_etl.calibrated_etl_classifier(hparams=hparams)
def testWrongLatticeRegularization(self):
  hparams = tfl_hparams.CalibratedEtlHParams(feature_names=['x'])
  hparams.set_param('non_monotonic_num_lattices', 2)
  hparams.set_param('non_monotonic_lattice_size', 2)
  hparams.set_param('non_monotonic_lattice_rank', 2)
  hparams.set_feature_param('x', 'lattice_l1_reg', 0.1)
  hparams.set_feature_param('x', 'lattice_l2_reg', 0.1)
  hparams.set_feature_param('x', 'lattice_l1_torsion_reg', 0.1)
  hparams.set_feature_param('x', 'lattice_l2_torsion_reg', 0.1)
  with self.assertRaisesRegexp(
      ValueError,
      'Hyperparameter configuration cannot be used in the calibrated etl '
      'estimator.'):
    calibrated_etl.calibrated_etl_classifier(hparams=hparams)
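# Sketch (not from the original suite; the method name is hypothetical): the
# accepted counterpart to the two error cases above. All three
# non_monotonic_* hyperparameters are set and no per-feature lattice
# regularizers are used, so the hyperparameter check that the tests above
# trigger should pass. Whether the resulting estimator is usable still
# depends on the usual training-time arguments (feature columns, keypoint
# initializers); only the hparams validation is illustrated here.
def testCompleteNonMonotonicConfigSketch(self):
  hparams = tfl_hparams.CalibratedEtlHParams(feature_names=['x'])
  hparams.set_param('non_monotonic_num_lattices', 2)
  hparams.set_param('non_monotonic_lattice_size', 2)
  hparams.set_param('non_monotonic_lattice_rank', 2)
  # Construction should not raise the 'Hyperparameter configuration cannot
  # be used' ValueError that the incomplete or over-regularized configs
  # above do.
  calibrated_etl.calibrated_etl_classifier(hparams=hparams)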
def _CalibratedEtlClassifier(self, feature_columns, **hparams_args):

  def init_fn():
    return keypoints_initialization.uniform_keypoints_for_signal(
        _NUM_KEYPOINTS, -1., 1., 0., 1.)

  hparams = tfl_hparams.CalibratedEtlHParams(
      num_keypoints=_NUM_KEYPOINTS,
      monotonic_num_lattices=1,
      monotonic_lattice_rank=1,
      monotonic_lattice_size=2,
      non_monotonic_num_lattices=1,
      non_monotonic_lattice_rank=1,
      non_monotonic_lattice_size=2,
      **hparams_args)
  # Turn off monotonic calibrator.
  hparams.set_param('calibration_monotonic', None)
  hparams.set_param('learning_rate', 0.1)

  return calibrated_etl.calibrated_etl_classifier(
      feature_columns=feature_columns,
      hparams=hparams,
      keypoints_initializers_fn=init_fn)
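# Minimal usage sketch for the helper above (illustrative, not a test from
# this suite): build numeric columns for the features the input_fn provides,
# pass any extra hparams as keyword arguments, and train as with any
# estimator. 'train_input_fn' is assumed to be defined as in the training
# test below.
#
#   feature_columns = [
#       feature_column_lib.numeric_column('x0'),
#       feature_column_lib.numeric_column('x1'),
#   ]
#   estimator = self._CalibratedEtlClassifier(feature_columns)
#   estimator.train(input_fn=train_input_fn)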
def testCalibratedEtlMonotonicClassifierTraining(self):
  # Construct the following training pairs:
  #
  # Training: (x, y)
  # ([0., 0.], 0.0)
  # ([0., 1.], 1.0)
  # ([1., 0.], 1.0)
  # ([1., 1.], 0.0)
  #
  # which is not a monotonic function. Then check whether forcing
  # monotonicity results in the following inequalities:
  # f(0, 0) <= f(0, 1), f(0, 0) <= f(1, 0), f(0, 1) <= f(1, 1),
  # f(1, 0) <= f(1, 1).
  x0 = np.array([0.0, 0.0, 1.0, 1.0])
  x1 = np.array([0.0, 1.0, 0.0, 1.0])
  x_samples = {'x0': x0, 'x1': x1}
  training_y = np.array([[False], [True], [True], [False]])

  train_input_fn = numpy_io.numpy_input_fn(
      x=x_samples, y=training_y, batch_size=4, num_epochs=1000, shuffle=False)
  test_input_fn = numpy_io.numpy_input_fn(x=x_samples, y=None, shuffle=False)

  # Define monotonic lattice classifier.
  feature_columns = [
      feature_column_lib.numeric_column('x0'),
      feature_column_lib.numeric_column('x1'),
  ]

  def init_fn():
    return keypoints_initialization.uniform_keypoints_for_signal(
        2, 0., 1., 0., 1.)

  hparams = tfl_hparams.CalibratedEtlHParams(
      num_keypoints=2,
      monotonic_num_lattices=2,
      monotonic_lattice_rank=2,
      monotonic_lattice_size=2)
  hparams.set_param('calibration_monotonic', +1)
  hparams.set_param('lattice_monotonic', True)
  hparams.set_param('learning_rate', 0.1)

  estimator = calibrated_etl.calibrated_etl_classifier(
      feature_columns=feature_columns,
      hparams=hparams,
      keypoints_initializers_fn=init_fn)
  estimator.train(input_fn=train_input_fn)
  predictions = [
      results['logits'][0]
      for results in estimator.predict(input_fn=test_input_fn)
  ]

  self.assertEqual(len(predictions), 4)
  # Check monotonicity. Note that the projection has its own precision, so
  # we add a small tolerance.
  self.assertLess(predictions[0], predictions[1] + 1e-6)
  self.assertLess(predictions[0], predictions[2] + 1e-6)
  self.assertLess(predictions[1], predictions[3] + 1e-6)
  self.assertLess(predictions[2], predictions[3] + 1e-6)
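# The four assertLess calls above spell out coordinate-wise monotonicity on
# the 2x2 input grid. The helper below is a sketch of the same check written
# generically (the helper name and tolerance argument are illustrative, not
# part of this suite); it performs exactly the four comparisons used in the
# test above.
def _assertMonotonicOnUnitSquare(self, predictions, tol=1e-6):
  """Asserts f is nondecreasing in each coordinate on the 2x2 grid.

  `predictions` is ordered as [(0,0), (0,1), (1,0), (1,1)], matching the
  x_samples layout in testCalibratedEtlMonotonicClassifierTraining.
  """
  f00, f01, f10, f11 = predictions
  # Nondecreasing in x1 (second coordinate) for both values of x0.
  self.assertLess(f00, f01 + tol)
  self.assertLess(f10, f11 + tol)
  # Nondecreasing in x0 (first coordinate) for both values of x1.
  self.assertLess(f00, f10 + tol)
  self.assertLess(f01, f11 + tol)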