    def test_model_integration(self):
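        # Build (features, labels) training tensors from the test data directory via input_fn.get_input_fn.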
        features, labels = input_fn.get_input_fn(
            tf.estimator.ModeKeys.TRAIN, [self.input_data_dir],
            'label.in_hospital_death.class',
            sequence_features=[
                'Observation.code', 'Observation.value.quantity.value',
                'Observation.value.quantity.unit',
                'Observation.code.harmonized:valueset-observation-name'
            ],
            dense_sequence_feature='Observation.value.quantity.value',
            required_sequence_feature=
            'Observation.code.harmonized:valueset-observation-name',
            batch_size=2,
            shuffle=False)()
        num_steps = 2
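        # Minimal hyperparameters; gradient and path-integrated-gradients attribution and RNN attention are enabled.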
        hparams = contrib_training.HParams(
            batch_size=2,
            learning_rate=0.008,
            sequence_features=[
                'deltaTime', 'Observation.code',
                'Observation.value.quantity.value'
            ],
            categorical_values=['loinc:4', 'loinc:6', 'loinc:1'],
            categorical_seq_feature='Observation.code',
            context_features=['sequenceLength'],
            feature_value='Observation.value.quantity.value',
            label_key='label.in_hospital_death.class',
            attribution_threshold=-1.0,
            rnn_size=6,
            variational_recurrent_keep_prob=1.1,
            variational_input_keep_prob=1.1,
            variational_output_keep_prob=1.1,
            sequence_prediction=False,
            time_decayed=False,
            normalize=True,
            momentum=0.9,
            min_value=-1000.0,
            max_value=1000.0,
            volatility_loss_factor=0.0,
            attribution_max_delta_time=100000,
            input_keep_prob=1.0,
            include_sequence_prediction=False,
            include_gradients_attribution=True,
            include_gradients_sum_time_attribution=False,
            include_gradients_avg_time_attribution=False,
            include_path_integrated_gradients_attribution=True,
            include_diff_sequence_prediction_attribution=False,
            use_rnn_attention=True,
            attention_hidden_layer_dim=5,
            path_integrated_gradients_num_steps=10,
        )
        model = osm.ObservationSequenceModel()
        model_fn = model.create_model_fn(hparams)
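        # Build the graph twice in the same variable scope: TRAIN creates the variables, PREDICT reuses them.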
        with tf.variable_scope('test'):
            model_fn_ops_train = model_fn(features, labels,
                                          tf.estimator.ModeKeys.TRAIN)
        with tf.variable_scope('test', reuse=True):
            model_fn_ops_eval = model_fn(features,
                                         labels=None,
                                         mode=tf.estimator.ModeKeys.PREDICT)

        with self.test_session() as sess:
            sess.run(tf.global_variables_initializer())
            sess.run(tf.tables_initializer())
            # Test train.
            for i in range(num_steps):
                loss, _ = sess.run(
                    [model_fn_ops_train.loss, model_fn_ops_train.train_op])
                if i == 0:
                    initial_loss = loss
            self.assertLess(loss, initial_loss)
            # Test infer.
            sess.run(model_fn_ops_eval.predictions)
    def testBasicModelFn(self, sequence_prediction, include_gradients,
                         include_gradients_sum_time,
                         include_gradients_avg_time,
                         include_path_integrated_gradients,
                         include_diff_sequence_prediction, use_rnn_attention,
                         attention_hidden_layer_dim, volatility_loss_factor):
        """This high-level tests ensures there are no errors during training.

    It also checks that the loss is decreasing.

    Args:
      sequence_prediction: Whether to consider the recent predictions in the
        loss or only the most last prediction.
      include_gradients: Whether to generate attribution with the
        gradients of the last predictions.
      include_gradients_sum_time: Whether to generate attribution
        with the gradients of the sum of the predictions over time.
      include_gradients_avg_time: Whether to generate attribution
        with the gradients of the average of the predictions over time.
      include_path_integrated_gradients: Whether to generate
        attribution with the integrated gradients of last predictions compared
        to their most recent values before attribution_max_delta_time.
      include_diff_sequence_prediction: Whether to
        generate attribution from the difference of consecutive predictions.
      use_rnn_attention: Whether to use attention for the RNN.
      attention_hidden_layer_dim: If use_rnn_attention what the dimensionality
        of a hidden layer should be (or 0 if none) of last output and
        intermediates before multiplying to obtain a weight.
      volatility_loss_factor: Include the sum of the changes in predictions
        across the sequence in the loss multiplied by this factor.
    """
        num_steps = 2
        hparams = contrib_training.HParams(
            batch_size=2,
            learning_rate=0.008,
            sequence_features=[
                'deltaTime', 'Observation.code',
                'Observation.valueQuantity.value'
            ],
            categorical_values=['loinc:1', 'loinc:2', 'MISSING'],
            categorical_seq_feature='Observation.code',
            context_features=['sequenceLength'],
            feature_value='Observation.valueQuantity.value',
            label_key='label.in_hospital_death',
            attribution_threshold=-1.0,
            rnn_size=6,
            variational_recurrent_keep_prob=1.1,
            variational_input_keep_prob=1.1,
            variational_output_keep_prob=1.1,
            sequence_prediction=sequence_prediction,
            time_decayed=False,
            normalize=True,
            momentum=0.9,
            min_value=-1000.0,
            max_value=1000.0,
            volatility_loss_factor=volatility_loss_factor,
            attribution_max_delta_time=100000,
            input_keep_prob=1.0,
            include_sequence_prediction=sequence_prediction,
            include_gradients_attribution=include_gradients,
            include_gradients_sum_time_attribution=include_gradients_sum_time,
            include_gradients_avg_time_attribution=include_gradients_avg_time,
            include_path_integrated_gradients_attribution=(
                include_path_integrated_gradients),
            include_diff_sequence_prediction_attribution=(
                include_diff_sequence_prediction),
            use_rnn_attention=use_rnn_attention,
            attention_hidden_layer_dim=attention_hidden_layer_dim,
            path_integrated_gradients_num_steps=10,
        )
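        # Sparse observation values for a batch of two sequences with up to three steps each.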
        observation_values = tf.SparseTensor(
            indices=[[0, 0, 0], [0, 1, 0], [0, 2, 0], [1, 0, 0], [1, 1, 0],
                     [1, 2, 0]],
            values=[100.0, 2.3, 9999999.0, 0.5, 0.0, 4.0],
            dense_shape=[2, 3, 1])
        model = osm.ObservationSequenceModel()
        model_fn = model.create_model_fn(hparams)
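        # Hand-built feature tensors keyed with the context/sequence prefixes defined in input_fn.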
        features = {
            input_fn.CONTEXT_KEY_PREFIX + 'sequenceLength':
            tf.constant([[2], [3]], dtype=tf.int64),
            input_fn.SEQUENCE_KEY_PREFIX + 'Observation.code':
            tf.SparseTensor(indices=observation_values.indices,
                            values=[
                                'loinc:2', 'loinc:1', 'loinc:2', 'loinc:1',
                                'MISSING', 'loinc:1'
                            ],
                            dense_shape=observation_values.dense_shape),
            input_fn.SEQUENCE_KEY_PREFIX + 'Observation.valueQuantity.value':
            observation_values,
            input_fn.SEQUENCE_KEY_PREFIX + 'deltaTime':
            tf.constant([[[1], [2], [0]], [[1], [3], [4]]], dtype=tf.int64)
        }
        label_key = 'label.in_hospital_death'
        labels = {label_key: tf.constant([[1.0], [0.0]], dtype=tf.float32)}
        with tf.variable_scope('test'):
            model_fn_ops_train = model_fn(features, labels,
                                          tf.estimator.ModeKeys.TRAIN)
        with tf.variable_scope('test', reuse=True):
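            # Expose the raw label as a context feature for the PREDICT pass.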
            features[input_fn.CONTEXT_KEY_PREFIX +
                     'label.in_hospital_death'] = tf.SparseTensor(
                         indices=[[0, 0]],
                         values=['expired'],
                         dense_shape=[2, 1])
            model_fn_ops_eval = model_fn(features,
                                         labels=None,
                                         mode=tf.estimator.ModeKeys.PREDICT)

        with self.test_session() as sess:
            sess.run(tf.global_variables_initializer())
            sess.run(tf.tables_initializer())
            # Test train.
            for i in range(num_steps):
                loss, _ = sess.run(
                    [model_fn_ops_train.loss, model_fn_ops_train.train_op])
                if i == 0:
                    initial_loss = loss
            self.assertLess(loss, initial_loss)
            # Test infer.
            sess.run(model_fn_ops_eval.predictions)
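

# A minimal sketch (not part of the original tests) of how the same model_fn
# could be wired into a tf.estimator.Estimator for real training. It assumes
# the osm and input_fn modules imported by this test file, that
# create_model_fn returns an Estimator-compatible model_fn, and that the
# hparams fields map onto get_input_fn arguments as shown; data_dir, model_dir
# and steps are hypothetical caller-supplied values.
def train_with_estimator(hparams, data_dir, model_dir, steps=100):
    """Trains an ObservationSequenceModel with tf.estimator (illustrative only)."""
    estimator = tf.estimator.Estimator(
        model_fn=osm.ObservationSequenceModel().create_model_fn(hparams),
        model_dir=model_dir)
    train_input_fn = input_fn.get_input_fn(
        tf.estimator.ModeKeys.TRAIN, [data_dir],
        hparams.label_key,
        sequence_features=hparams.sequence_features,
        dense_sequence_feature=hparams.feature_value,
        required_sequence_feature=hparams.categorical_seq_feature,
        batch_size=hparams.batch_size,
        shuffle=True)
    estimator.train(input_fn=train_input_fn, steps=steps)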