Example #1
0
 def testCalibratedLinearClassifier(self, feature_names, output_calibration,
                                    use_bias, auc):
   """Trains a calibrated linear classifier and checks its test-set AUC."""
   self._ResetAllBackends()
   # Restrict columns/configs to just the features requested for this case.
   selected = set(feature_names)
   feature_columns = [
       col for col in self.heart_feature_columns if col.name in selected
   ]
   feature_configs = [
       cfg for cfg in self.heart_feature_configs if cfg.name in selected
   ]
   model_config = configs.CalibratedLinearConfig(
       feature_configs=feature_configs,
       use_bias=use_bias,
       output_calibration=output_calibration,
       regularizer_configs=[
           configs.RegularizerConfig(name='output_calib_hessian', l2=1e-4),
       ])
   estimator = estimators.CannedClassifier(
       feature_columns=feature_columns,
       model_config=model_config,
       feature_analysis_input_fn=self._GetHeartTrainInputFn(num_epochs=1),
       optimizer=tf.keras.optimizers.Adam(0.01))
   estimator.train(input_fn=self._GetHeartTrainInputFn(num_epochs=200))
   results = estimator.evaluate(input_fn=self._GetHeartTestInputFn())
   logging.info('Calibrated linear classifier results:')
   logging.info(results)
   # The trained model must beat the case-specific AUC floor.
   self.assertGreater(results['auc'], auc)
Example #2
0
    def testCalibratedLatticeEnsembleModelInfo(self, lattices, num_lattices,
                                               lattice_rank, parameterization,
                                               separate_calibrators,
                                               output_calibration):
        """Exports an ensemble model and verifies the parsed graph node count."""
        self._ResetAllBackends()
        feature_configs = copy.deepcopy(self.heart_feature_configs)
        needs_simple_features = (
            lattices == 'rtl_layer' or parameterization == 'kronecker_factored')
        if needs_simple_features:
            # RTL Layer only supports monotonicity and bound constraints.
            for cfg in feature_configs:
                cfg.lattice_size = 2
                cfg.unimodality = 'none'
                cfg.reflects_trust_in = None
                cfg.dominates = None
                cfg.regularizer_configs = None
        model_config = configs.CalibratedLatticeEnsembleConfig(
            feature_configs=feature_configs,
            lattices=lattices,
            num_lattices=num_lattices,
            lattice_rank=lattice_rank,
            parameterization=parameterization,
            separate_calibrators=separate_calibrators,
            output_calibration=output_calibration,
        )
        estimator = estimators.CannedClassifier(
            feature_columns=self.heart_feature_columns,
            model_config=model_config,
            feature_analysis_input_fn=self._GetHeartTrainInputFn(num_epochs=1),
            prefitting_input_fn=self._GetHeartTrainInputFn(num_epochs=5),
            optimizer=tf.keras.optimizers.Adam(0.01),
            prefitting_optimizer=tf.keras.optimizers.Adam(0.01))
        estimator.train(input_fn=self._GetHeartTrainInputFn(num_epochs=20))

        # Serving input fn is used to create saved models.
        serving_input_fn = (
            tf.estimator.export.build_parsing_serving_input_receiver_fn(
                feature_spec=fc.make_parse_example_spec(
                    self.heart_feature_columns)))
        saved_model_path = estimator.export_saved_model(
            estimator.model_dir, serving_input_fn)
        logging.info('Model exported to %s', saved_model_path)
        model = estimators.get_model_graph(saved_model_path)

        # Expected nodes: one per input feature, one lattice per submodel,
        # the submodel-averaging node, and an optional output calibrator.
        expected_num_nodes = (
            len(self.heart_feature_columns) + num_lattices + 1 +
            int(output_calibration))
        # Calibrators: one per lattice input when separate, otherwise one
        # shared calibrator per feature.
        if separate_calibrators:
            expected_num_nodes += num_lattices * lattice_rank
        else:
            expected_num_nodes += len(self.heart_feature_columns)

        self.assertLen(model.nodes, expected_num_nodes)
Example #3
0
 def testCalibratedLatticeEnsembleClassifier(self, feature_names, lattices,
                                             num_lattices, lattice_rank,
                                             separate_calibrators,
                                             output_calibration, auc):
     """Trains a calibrated lattice ensemble and checks its test-set AUC."""
     self._ResetAllBackends()
     # Restrict columns/configs to just the features requested for this case.
     selected = set(feature_names)
     feature_columns = [
         col for col in self.heart_feature_columns if col.name in selected
     ]
     feature_configs = [
         cfg for cfg in self.heart_feature_configs if cfg.name in selected
     ]
     if lattices == 'rtl_layer':
         # RTL Layer only supports monotonicity and bound constraints.
         feature_configs = copy.deepcopy(feature_configs)
         for cfg in feature_configs:
             cfg.lattice_size = 2
             cfg.unimodality = 'none'
             cfg.reflects_trust_in = None
             cfg.dominates = None
             cfg.regularizer_configs = None
     model_config = configs.CalibratedLatticeEnsembleConfig(
         feature_configs=feature_configs,
         lattices=lattices,
         num_lattices=num_lattices,
         lattice_rank=lattice_rank,
         separate_calibrators=separate_calibrators,
         output_calibration=output_calibration,
         regularizer_configs=[
             configs.RegularizerConfig(name='torsion', l2=1e-4),
             configs.RegularizerConfig(name='output_calib_hessian',
                                       l2=1e-4),
         ],
     )
     estimator = estimators.CannedClassifier(
         feature_columns=feature_columns,
         model_config=model_config,
         feature_analysis_input_fn=self._GetHeartTrainInputFn(num_epochs=1),
         prefitting_input_fn=self._GetHeartTrainInputFn(num_epochs=50),
         optimizer=tf.keras.optimizers.Adam(0.01),
         prefitting_optimizer=tf.keras.optimizers.Adam(0.01))
     estimator.train(input_fn=self._GetHeartTrainInputFn(num_epochs=200))
     results = estimator.evaluate(input_fn=self._GetHeartTestInputFn())
     logging.info('Calibrated lattice ensemble classifier results:')
     logging.info(results)
     # The trained model must beat the case-specific AUC floor.
     self.assertGreater(results['auc'], auc)
Example #4
0
    def testCalibratedLatticeEnsembleModelInfo(self, num_lattices,
                                               lattice_rank,
                                               separate_calibrators,
                                               output_calibration):
        """Exports an ensemble model and verifies the parsed graph node count."""
        self._ResetAllBackends()
        model_config = configs.CalibratedLatticeEnsembleConfig(
            feature_configs=self.heart_feature_configs,
            num_lattices=num_lattices,
            lattice_rank=lattice_rank,
            separate_calibrators=separate_calibrators,
            output_calibration=output_calibration,
        )
        estimator = estimators.CannedClassifier(
            feature_columns=self.heart_feature_columns,
            model_config=model_config,
            feature_analysis_input_fn=self._GetHeartTrainInputFn(num_epochs=1),
            prefitting_input_fn=self._GetHeartTrainInputFn(num_epochs=5),
            optimizer=tf.keras.optimizers.Adam(0.01),
            prefitting_optimizer=tf.keras.optimizers.Adam(0.01))
        estimator.train(input_fn=self._GetHeartTrainInputFn(num_epochs=20))

        # Serving input fn is used to create saved models.
        serving_input_fn = (
            tf.estimator.export.build_parsing_serving_input_receiver_fn(
                feature_spec=fc.make_parse_example_spec(
                    self.heart_feature_columns)))
        saved_model_path = estimator.export_saved_model(
            estimator.model_dir, serving_input_fn)
        logging.info('Model exported to %s', saved_model_path)
        model = estimators.get_model_graph(saved_model_path)

        # Expected nodes: one per input feature, one lattice per submodel,
        # the submodel-averaging node, and an optional output calibrator.
        expected_num_nodes = (
            len(self.heart_feature_columns) + num_lattices + 1 +
            int(output_calibration))
        # Calibrators: one per lattice input when separate, otherwise one
        # shared calibrator per feature.
        if separate_calibrators:
            expected_num_nodes += num_lattices * lattice_rank
        else:
            expected_num_nodes += len(self.heart_feature_columns)

        self.assertLen(model.nodes, expected_num_nodes)
Example #5
0
    def testCalibratedModelInfo(self, model_type, output_calibration):
        """Exports a calibrated linear/lattice model and checks its node count."""
        self._ResetAllBackends()
        # Both config classes take the same constructor arguments here, so
        # select the class first and build the config once.
        config_cls = (
            configs.CalibratedLinearConfig
            if model_type == 'linear' else configs.CalibratedLatticeConfig)
        model_config = config_cls(
            feature_configs=self.heart_feature_configs,
            output_calibration=output_calibration,
        )
        estimator = estimators.CannedClassifier(
            feature_columns=self.heart_feature_columns,
            model_config=model_config,
            feature_analysis_input_fn=self._GetHeartTrainInputFn(num_epochs=1),
            prefitting_input_fn=self._GetHeartTrainInputFn(num_epochs=5),
            optimizer=tf.keras.optimizers.Adam(0.01),
            prefitting_optimizer=tf.keras.optimizers.Adam(0.01))
        estimator.train(input_fn=self._GetHeartTrainInputFn(num_epochs=20))

        # Serving input fn is used to create saved models.
        serving_input_fn = (
            tf.estimator.export.build_parsing_serving_input_receiver_fn(
                feature_spec=fc.make_parse_example_spec(
                    self.heart_feature_columns)))
        saved_model_path = estimator.export_saved_model(
            estimator.model_dir, serving_input_fn)
        logging.info('Model exported to %s', saved_model_path)
        model = estimators.get_model_graph(saved_model_path)

        # Expected nodes: one input plus one calibrator per feature, the
        # single linear/lattice layer, and an optional output calibrator.
        expected_num_nodes = (
            2 * len(self.heart_feature_columns) + 1 + int(output_calibration))

        self.assertLen(model.nodes, expected_num_nodes)
Example #6
0
 def testCalibratedLatticeEnsembleClassifier(self, feature_names, lattices,
                                             num_lattices, lattice_rank,
                                             separate_calibrators,
                                             output_calibration, auc):
     """Trains a calibrated lattice ensemble and checks its test-set AUC."""
     self._ResetAllBackends()
     # Restrict columns/configs to just the features requested for this case.
     selected = set(feature_names)
     feature_columns = [
         col for col in self.heart_feature_columns if col.name in selected
     ]
     feature_configs = [
         cfg for cfg in self.heart_feature_configs if cfg.name in selected
     ]
     model_config = configs.CalibratedLatticeEnsembleConfig(
         feature_configs=feature_configs,
         lattices=lattices,
         num_lattices=num_lattices,
         lattice_rank=lattice_rank,
         separate_calibrators=separate_calibrators,
         output_calibration=output_calibration,
         regularizer_configs=[
             configs.RegularizerConfig(name='torsion', l2=1e-4),
             configs.RegularizerConfig(name='output_calib_hessian',
                                       l2=1e-4),
         ],
     )
     estimator = estimators.CannedClassifier(
         feature_columns=feature_columns,
         model_config=model_config,
         feature_analysis_input_fn=self._GetHeartTrainInputFn(num_epochs=1),
         prefitting_input_fn=self._GetHeartTrainInputFn(num_epochs=50),
         optimizer=tf.keras.optimizers.Adam(0.01),
         prefitting_optimizer=tf.keras.optimizers.Adam(0.01))
     estimator.train(input_fn=self._GetHeartTrainInputFn(num_epochs=200))
     results = estimator.evaluate(input_fn=self._GetHeartTestInputFn())
     logging.info('Calibrated lattice ensemble classifier results:')
     logging.info(results)
     # The trained model must beat the case-specific AUC floor.
     self.assertGreater(results['auc'], auc)