Example 1
 def testCalibratedLinearRegressor(self, feature_names, output_calibration,
                                   use_bias, average_loss):
   self._ResetAllBackends()
   feature_columns = [
       feature_column for feature_column in self.boston_feature_columns
       if feature_column.name in feature_names
   ]
   feature_configs = [
       feature_config for feature_config in self.boston_feature_configs
       if feature_config.name in feature_names
   ]
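   # Calibrated linear model: each feature goes through a piecewise-linear
   # calibrator and the calibrated values are combined linearly, optionally
   # with a bias term and a final output calibration.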
   model_config = configs.CalibratedLinearConfig(
       use_bias=use_bias,
       regularizer_configs=[
           configs.RegularizerConfig(name='output_calib_hessian', l2=1e-4),
       ],
       output_calibration=output_calibration,
       feature_configs=feature_configs)
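   # feature_analysis_input_fn is consumed once so the estimator can pick
   # calibration input keypoints from the data (e.g. feature quantiles).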
   estimator = estimators.CannedRegressor(
       feature_columns=feature_columns,
       model_config=model_config,
       feature_analysis_input_fn=self._GetBostonTrainInputFn(num_epochs=1),
       optimizer=tf.keras.optimizers.Adam(0.01))
   estimator.train(input_fn=self._GetBostonTrainInputFn(num_epochs=200))
   results = estimator.evaluate(input_fn=self._GetBostonTestInputFn())
   logging.info('Calibrated linear regressor results:')
   logging.info(results)
   self.assertLess(results['average_loss'], average_loss)
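These tests lean on fixtures defined elsewhere in the test class: self.boston_feature_columns, self.boston_feature_configs and the _GetBoston*InputFn helpers. A minimal sketch of what such fixtures could look like; the feature names, keypoint count and input pipeline below are assumptions for illustration, not taken from the source:

import tensorflow as tf
from tensorflow_lattice import configs

# Illustrative subset of the Boston housing feature names.
FEATURE_NAMES = ['CRIM', 'RM', 'AGE']

def make_boston_feature_columns():
  # One numeric feature column per input feature.
  return [tf.feature_column.numeric_column(name) for name in FEATURE_NAMES]

def make_boston_feature_configs():
  # Per-feature calibration settings consumed by the model configs above.
  return [
      configs.FeatureConfig(name=name, pwl_calibration_num_keypoints=10)
      for name in FEATURE_NAMES
  ]

def make_train_input_fn(features, labels, num_epochs):
  # tf.estimator-style input_fn over in-memory numpy arrays.
  return tf.compat.v1.estimator.inputs.numpy_input_fn(
      x=features, y=labels, batch_size=32,
      num_epochs=num_epochs, shuffle=False)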
Example 2
 def testCalibratedLatticeEnsembleRegressor(self, feature_names, lattices,
                                            num_lattices, lattice_rank,
                                            separate_calibrators,
                                            output_calibration,
                                            average_loss):
     self._ResetAllBackends()
     feature_columns = [
         feature_column for feature_column in self.boston_feature_columns
         if feature_column.name in feature_names
     ]
     feature_configs = [
         feature_config for feature_config in self.boston_feature_configs
         if feature_config.name in feature_names
     ]
     if lattices == 'rtl_layer':
         # RTL Layer only supports monotonicity and bound constraints.
         feature_configs = copy.deepcopy(feature_configs)
         for feature_config in feature_configs:
             feature_config.lattice_size = 2
             feature_config.unimodality = 'none'
             feature_config.reflects_trust_in = None
             feature_config.dominates = None
             feature_config.regularizer_configs = None
     model_config = configs.CalibratedLatticeEnsembleConfig(
         regularizer_configs=[
             configs.RegularizerConfig(name='torsion', l2=1e-5),
             configs.RegularizerConfig(name='output_calib_hessian',
                                       l2=1e-5),
         ],
         feature_configs=feature_configs,
         lattices=lattices,
         num_lattices=num_lattices,
         lattice_rank=lattice_rank,
         separate_calibrators=separate_calibrators,
         output_calibration=output_calibration,
     )
     estimator = estimators.CannedRegressor(
         feature_columns=feature_columns,
         model_config=model_config,
         feature_analysis_input_fn=self._GetBostonTrainInputFn(
             num_epochs=1),
         prefitting_input_fn=self._GetBostonTrainInputFn(num_epochs=50),
         optimizer=tf.keras.optimizers.Adam(0.05),
         prefitting_optimizer=tf.keras.optimizers.Adam(0.05))
     estimator.train(input_fn=self._GetBostonTrainInputFn(num_epochs=200))
     results = estimator.evaluate(input_fn=self._GetBostonTestInputFn())
     logging.info('Calibrated lattice ensemble regressor results:')
     logging.info(results)
     self.assertLess(results['average_loss'], average_loss)
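The lattices argument of CalibratedLatticeEnsembleConfig controls how the ensemble is laid out: it can be an explicit list of per-lattice feature names, or a string such as 'rtl_layer', which builds the ensemble as an RTL layer that only supports monotonicity and bound constraints (hence the config sanitizing in the branch above). A short sketch of both forms, with illustrative feature names and sizes:

from tensorflow_lattice import configs

feature_configs = [
    configs.FeatureConfig(name='CRIM', lattice_size=2),
    configs.FeatureConfig(name='RM', lattice_size=2),
    configs.FeatureConfig(name='AGE', lattice_size=2),
]

# Explicit layout: each inner list names the features of one lattice.
explicit_config = configs.CalibratedLatticeEnsembleConfig(
    feature_configs=feature_configs,
    lattices=[['CRIM', 'RM'], ['RM', 'AGE']])

# 'rtl_layer': the ensemble is built as a single RTL layer; num_lattices and
# lattice_rank describe its shape instead of an explicit feature assignment.
# Other string options include 'random' (the default) and 'crystals', which
# uses the prefitting stage configured in the estimator above.
rtl_config = configs.CalibratedLatticeEnsembleConfig(
    feature_configs=feature_configs,
    lattices='rtl_layer',
    num_lattices=4,
    lattice_rank=2)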
Example 3
    def testCalibratedLatticeEnsembleFix2dConstraintViolations(
            self, feature_names, lattices, num_lattices, lattice_rank,
            expected_lattices):
        self._ResetAllBackends()
        feature_columns = [
            feature_column for feature_column in self.boston_feature_columns
            if feature_column.name in feature_names
        ]
        feature_configs = [
            feature_config for feature_config in self.boston_feature_configs
            if feature_config.name in feature_names
        ]

        model_config = configs.CalibratedLatticeEnsembleConfig(
            feature_configs=feature_configs,
            lattices=lattices,
            num_lattices=num_lattices,
            lattice_rank=lattice_rank,
        )
        estimator = estimators.CannedRegressor(
            feature_columns=feature_columns,
            model_config=model_config,
            feature_analysis_input_fn=self._GetBostonTrainInputFn(
                num_epochs=1),
            prefitting_input_fn=self._GetBostonTrainInputFn(num_epochs=50),
            optimizer=tf.keras.optimizers.Adam(0.05),
            prefitting_optimizer=tf.keras.optimizers.Adam(0.05))
        estimator.train(input_fn=self._GetBostonTrainInputFn(num_epochs=200))

        # Serving input fn is used to create saved models.
        serving_input_fn = (
            tf.estimator.export.build_parsing_serving_input_receiver_fn(
                feature_spec=fc.make_parse_example_spec(feature_columns)))
        saved_model_path = estimator.export_saved_model(
            estimator.model_dir, serving_input_fn)
        logging.info('Model exported to %s', saved_model_path)
        model = estimators.get_model_graph(saved_model_path)
        lattices = []
        for node in model.nodes:
            if isinstance(node, model_info.LatticeNode):
                lattices.append([
                    input_node.input_node.name
                    for input_node in node.input_nodes
                ])

        self.assertLen(lattices, len(expected_lattices))
        for lattice, expected_lattice in zip(lattices, expected_lattices):
            self.assertCountEqual(lattice, expected_lattice)
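estimators.get_model_graph parses the exported saved model back into a model_info.ModelGraph, which is what the assertions above walk. The same traversal as a standalone sketch; the saved-model path is a placeholder:

from tensorflow_lattice import estimators, model_info

model_graph = estimators.get_model_graph('/tmp/exported_model')  # placeholder path
for node in model_graph.nodes:
  if isinstance(node, model_info.LatticeNode):
    # Each lattice's inputs are calibration nodes; their input_node is the
    # original feature node, whose name identifies the feature.
    print('lattice over:', [calib.input_node.name for calib in node.input_nodes])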
Example 4
 def testCalibratedLatticeEnsembleRegressor(self, feature_names, lattices,
                                            num_lattices, lattice_rank,
                                            separate_calibrators,
                                            output_calibration,
                                            average_loss):
     self._ResetAllBackends()
     feature_columns = [
         feature_column for feature_column in self.boston_feature_columns
         if feature_column.name in feature_names
     ]
     feature_configs = [
         feature_config for feature_config in self.boston_feature_configs
         if feature_config.name in feature_names
     ]
     model_config = configs.CalibratedLatticeEnsembleConfig(
         regularizer_configs=[
             configs.RegularizerConfig(name='torsion', l2=1e-5),
             configs.RegularizerConfig(name='output_calib_hessian',
                                       l2=1e-5),
         ],
         feature_configs=feature_configs,
         lattices=lattices,
         num_lattices=num_lattices,
         lattice_rank=lattice_rank,
         separate_calibrators=separate_calibrators,
         output_calibration=output_calibration,
     )
     estimator = estimators.CannedRegressor(
         feature_columns=feature_columns,
         model_config=model_config,
         feature_analysis_input_fn=self._GetBostonTrainInputFn(
             num_epochs=1),
         prefitting_input_fn=self._GetBostonTrainInputFn(num_epochs=50),
         optimizer=tf.keras.optimizers.Adam(0.05),
         prefitting_optimizer=tf.keras.optimizers.Adam(0.05))
     estimator.train(input_fn=self._GetBostonTrainInputFn(num_epochs=200))
     results = estimator.evaluate(input_fn=self._GetBostonTestInputFn())
     logging.info('Calibrated lattice ensemble regressor results:')
     logging.info(results)
     self.assertLess(results['average_loss'], average_loss)
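The extra arguments these methods take (feature_names, num_lattices, average_loss and so on) indicate that the test cases are parameterized. A guess at what the driver might look like, using absl's parameterized test support with purely illustrative values:

from absl.testing import parameterized
import tensorflow as tf

class CannedEstimatorsTest(parameterized.TestCase, tf.test.TestCase):

  @parameterized.parameters(
      # feature_names, lattices, num_lattices, lattice_rank,
      # separate_calibrators, output_calibration, average_loss
      (['CRIM', 'RM', 'AGE'], 'random', 4, 2, True, False, 40.0),
      (['CRIM', 'RM', 'AGE'], 'rtl_layer', 4, 2, True, False, 40.0),
  )
  def testCalibratedLatticeEnsembleRegressor(self, feature_names, lattices,
                                             num_lattices, lattice_rank,
                                             separate_calibrators,
                                             output_calibration, average_loss):
    ...  # body as in the examples above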