Example #1
    def testCalibratedLatticeEnsembleModelInfo(self, lattices, num_lattices,
                                               lattice_rank, parameterization,
                                               separate_calibrators,
                                               output_calibration):
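        """Checks the node count of the exported ensemble model graph."""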
        self._ResetAllBackends()
        feature_configs = copy.deepcopy(self.heart_feature_configs)
        if lattices == 'rtl_layer' or parameterization == 'kronecker_factored':
            # RTL Layer only supports monotonicity and bound constraints.
            for feature_config in feature_configs:
                feature_config.lattice_size = 2
                feature_config.unimodality = 'none'
                feature_config.reflects_trust_in = None
                feature_config.dominates = None
                feature_config.regularizer_configs = None
        model_config = configs.CalibratedLatticeEnsembleConfig(
            feature_configs=feature_configs,
            lattices=lattices,
            num_lattices=num_lattices,
            lattice_rank=lattice_rank,
            parameterization=parameterization,
            separate_calibrators=separate_calibrators,
            output_calibration=output_calibration,
        )
        estimator = estimators.CannedClassifier(
            feature_columns=self.heart_feature_columns,
            model_config=model_config,
            feature_analysis_input_fn=self._GetHeartTrainInputFn(num_epochs=1),
            prefitting_input_fn=self._GetHeartTrainInputFn(num_epochs=5),
            optimizer=tf.keras.optimizers.Adam(0.01),
            prefitting_optimizer=tf.keras.optimizers.Adam(0.01))
        estimator.train(input_fn=self._GetHeartTrainInputFn(num_epochs=20))

        # Serving input fn is used to create saved models.
        serving_input_fn = (
            tf.estimator.export.build_parsing_serving_input_receiver_fn(
                feature_spec=fc.make_parse_example_spec(
                    self.heart_feature_columns)))
        saved_model_path = estimator.export_saved_model(
            estimator.model_dir, serving_input_fn)
        logging.info('Model exported to %s', saved_model_path)
        model = estimators.get_model_graph(saved_model_path)

        expected_num_nodes = (
            len(self.heart_feature_columns) +  # Input features
            num_lattices +  # One lattice per submodel
            1 +  # Averaging submodels
            int(output_calibration))  # Output calibration
        if separate_calibrators:
            expected_num_nodes += num_lattices * lattice_rank
        else:
            expected_num_nodes += len(self.heart_feature_columns)

        self.assertLen(model.nodes, expected_num_nodes)
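These tests assume fixtures such as self.heart_feature_columns, self.heart_feature_configs, and the _GetHeartTrainInputFn helper that are defined in the test class setup and not shown here. A minimal sketch of what the feature fixtures might look like, using illustrative feature names ('age', 'thalach') that are assumptions rather than the original fixture contents:

import tensorflow as tf
from tensorflow_lattice import configs

# Illustrative numeric feature columns for a heart-disease-style dataset.
heart_feature_columns = [
    tf.feature_column.numeric_column('age'),
    tf.feature_column.numeric_column('thalach'),
]

# One FeatureConfig per feature, carrying calibration and lattice settings.
heart_feature_configs = [
    configs.FeatureConfig(
        name='age',
        lattice_size=2,
        monotonicity='increasing',
        pwl_calibration_num_keypoints=10),
    configs.FeatureConfig(
        name='thalach',
        lattice_size=2,
        monotonicity='decreasing',
        pwl_calibration_num_keypoints=10),
]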
Example #2
    def testCalibratedLatticeEnsembleFix2dConstraintViolations(
            self, feature_names, lattices, num_lattices, lattice_rank,
            expected_lattices):
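        """Checks lattice features after fixing 2d constraint violations."""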
        self._ResetAllBackends()
        feature_columns = [
            feature_column for feature_column in self.boston_feature_columns
            if feature_column.name in feature_names
        ]
        feature_configs = [
            feature_config for feature_config in self.boston_feature_configs
            if feature_config.name in feature_names
        ]

        model_config = configs.CalibratedLatticeEnsembleConfig(
            feature_configs=feature_configs,
            lattices=lattices,
            num_lattices=num_lattices,
            lattice_rank=lattice_rank,
        )
        estimator = estimators.CannedRegressor(
            feature_columns=feature_columns,
            model_config=model_config,
            feature_analysis_input_fn=self._GetBostonTrainInputFn(
                num_epochs=1),
            prefitting_input_fn=self._GetBostonTrainInputFn(num_epochs=50),
            optimizer=tf.keras.optimizers.Adam(0.05),
            prefitting_optimizer=tf.keras.optimizers.Adam(0.05))
        estimator.train(input_fn=self._GetBostonTrainInputFn(num_epochs=200))

        # Serving input fn is used to create saved models.
        serving_input_fn = (
            tf.estimator.export.build_parsing_serving_input_receiver_fn(
                feature_spec=fc.make_parse_example_spec(feature_columns)))
        saved_model_path = estimator.export_saved_model(
            estimator.model_dir, serving_input_fn)
        logging.info('Model exported to %s', saved_model_path)
        model = estimators.get_model_graph(saved_model_path)
        # Collect the feature names feeding each lattice in the exported graph.
        graph_lattices = []
        for node in model.nodes:
            if isinstance(node, model_info.LatticeNode):
                graph_lattices.append([
                    input_node.input_node.name
                    for input_node in node.input_nodes
                ])

        self.assertLen(graph_lattices, len(expected_lattices))
        for lattice, expected_lattice in zip(graph_lattices, expected_lattices):
            self.assertCountEqual(lattice, expected_lattice)
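The lattices argument passed above is parameterized by the test; per the tensorflow_lattice documentation it can either name a construction method ('random', 'crystals', 'rtl_layer') or spell out the per-lattice feature assignment as a list of lists of feature names. A minimal sketch of the explicit form, reusing the feature_configs filtered above with hypothetical Boston-housing feature names; it assumes num_lattices and lattice_rank can be left unset when the assignment is explicit:

model_config = configs.CalibratedLatticeEnsembleConfig(
    feature_configs=feature_configs,
    # Explicit assignment: one inner list of feature names per lattice.
    lattices=[['CRIM', 'RM'], ['RM', 'LSTAT']],
)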
Example #3
    def testCalibratedLatticeEnsembleModelInfo(self, num_lattices,
                                               lattice_rank,
                                               separate_calibrators,
                                               output_calibration):
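        """Checks the node count of the exported ensemble model graph."""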
        self._ResetAllBackends()
        model_config = configs.CalibratedLatticeEnsembleConfig(
            feature_configs=self.heart_feature_configs,
            num_lattices=num_lattices,
            lattice_rank=lattice_rank,
            separate_calibrators=separate_calibrators,
            output_calibration=output_calibration,
        )
        estimator = estimators.CannedClassifier(
            feature_columns=self.heart_feature_columns,
            model_config=model_config,
            feature_analysis_input_fn=self._GetHeartTrainInputFn(num_epochs=1),
            prefitting_input_fn=self._GetHeartTrainInputFn(num_epochs=5),
            optimizer=tf.keras.optimizers.Adam(0.01),
            prefitting_optimizer=tf.keras.optimizers.Adam(0.01))
        estimator.train(input_fn=self._GetHeartTrainInputFn(num_epochs=20))

        # Serving input fn is used to create saved models.
        serving_input_fn = (
            tf.estimator.export.build_parsing_serving_input_receiver_fn(
                feature_spec=fc.make_parse_example_spec(
                    self.heart_feature_columns)))
        saved_model_path = estimator.export_saved_model(
            estimator.model_dir, serving_input_fn)
        logging.info('Model exported to %s', saved_model_path)
        model = estimators.get_model_graph(saved_model_path)

        expected_num_nodes = (
            len(self.heart_feature_columns) +  # Input features
            num_lattices +  # One lattice per submodel
            1 +  # Averaging submodels
            int(output_calibration))  # Output calibration
        if separate_calibrators:
            expected_num_nodes += num_lattices * lattice_rank
        else:
            expected_num_nodes += len(self.heart_feature_columns)

        self.assertLen(model.nodes, expected_num_nodes)
Example #4
    def testCalibratedModelInfo(self, model_type, output_calibration):
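        """Checks the node count of the exported calibrated model graph."""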
        self._ResetAllBackends()
        if model_type == 'linear':
            model_config = configs.CalibratedLinearConfig(
                feature_configs=self.heart_feature_configs,
                output_calibration=output_calibration,
            )
        else:
            model_config = configs.CalibratedLatticeConfig(
                feature_configs=self.heart_feature_configs,
                output_calibration=output_calibration,
            )
        estimator = estimators.CannedClassifier(
            feature_columns=self.heart_feature_columns,
            model_config=model_config,
            feature_analysis_input_fn=self._GetHeartTrainInputFn(num_epochs=1),
            prefitting_input_fn=self._GetHeartTrainInputFn(num_epochs=5),
            optimizer=tf.keras.optimizers.Adam(0.01),
            prefitting_optimizer=tf.keras.optimizers.Adam(0.01))
        estimator.train(input_fn=self._GetHeartTrainInputFn(num_epochs=20))

        # Serving input fn is used to create saved models.
        serving_input_fn = (
            tf.estimator.export.build_parsing_serving_input_receiver_fn(
                feature_spec=fc.make_parse_example_spec(
                    self.heart_feature_columns)))
        saved_model_path = estimator.export_saved_model(
            estimator.model_dir, serving_input_fn)
        logging.info('Model exported to %s', saved_model_path)
        model = estimators.get_model_graph(saved_model_path)

        expected_num_nodes = (
            2 * len(self.heart_feature_columns) +  # Input features and calibration
            1 +  # Linear or lattice layer
            int(output_calibration))  # Output calibration

        self.assertLen(model.nodes, expected_num_nodes)
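All four examples end by loading the exported SavedModel with estimators.get_model_graph and asserting on its nodes attribute. A minimal sketch, assuming a saved_model_path produced as in the tests above, of how the returned graph can be inspected further; the per-node fields (input_nodes, input_node.name) are the same ones used in Example #2:

import collections

model = estimators.get_model_graph(saved_model_path)

# Count graph nodes by concrete type (input features, calibrators, lattices, ...).
nodes_by_type = collections.Counter(type(node).__name__ for node in model.nodes)
for node_type, count in sorted(nodes_by_type.items()):
    print(node_type, count)

# Recover which features feed each lattice, as done in Example #2.
lattice_features = [
    [input_node.input_node.name for input_node in node.input_nodes]
    for node in model.nodes
    if isinstance(node, model_info.LatticeNode)
]
print(lattice_features)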