Example #1
def _get_params(paths, max_iters):
    """
    Get the various parameters for model initialization.
    :param paths: An AllPaths namedtuple.
    :param max_iters: maximum number of L-BFGS iterations.
    :return: Three different parameter sets.
    """
    base_training_params = setup_fake_base_training_params(
        training_stage=constants.FIXED_EFFECT)
    base_training_params.training_output_dir = paths.training_score_path
    base_training_params.validation_output_dir = paths.validation_score_path

    schema_params = setup_fake_schema_params()

    raw_model_params = [
        '--' + constants.FEATURE_BAGS, 'global',
        '--' + constants.TRAIN_DATA_PATH, paths.training_data_path,
        '--' + constants.VALIDATION_DATA_PATH, paths.validation_data_path,
        '--' + constants.METADATA_FILE, paths.metadata_file,
        '--' + constants.FEATURE_FILE, paths.feature_file,
        '--' + constants.NUM_OF_LBFGS_ITERATIONS, f"{max_iters}",
        '--' + constants.MODEL_OUTPUT_DIR, paths.model_output_dir,
        '--' + constants.COPY_TO_LOCAL, 'False',
        '--' + constants.BATCH_SIZE, '16',
        '--' + constants.L2_REG_WEIGHT, f"{_L2_REG_WEIGHT}",
        '--' + constants.REGULARIZE_BIAS, 'True',
        '--' + constants.DELAYED_EXIT_IN_SECONDS, '1'
    ]
    return base_training_params, schema_params, raw_model_params
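
# The parameter lists in these examples are flat, CLI-style sequences of
# '--flag value' pairs. A minimal sketch of how such a list can be parsed
# (plain argparse here; the project may use its own parser, and the literal
# flag names below are assumptions standing in for the constants.* values):
import argparse

def _parse_raw_model_params(raw_model_params):
    parser = argparse.ArgumentParser()
    parser.add_argument('--num_of_lbfgs_iterations', type=int)
    parser.add_argument('--batch_size', type=int)
    parser.add_argument('--l2_reg_weight', type=float)
    # Tolerate flags this sketch does not declare, so it works with any of
    # the parameter lists built in these examples.
    args, _ = parser.parse_known_args(raw_model_params)
    return args

# e.g. _parse_raw_model_params(['--batch_size', '16', '--l2_reg_weight', '0.01'])
# -> Namespace(batch_size=16, l2_reg_weight=0.01, num_of_lbfgs_iterations=None)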
Example #2
    def test_train_should_fail_if_producer_or_consumer_fails(self):

        # Create raw params with fake partition entity
        base_training_params, raw_params = self.get_raw_params(
            partition_entity='fake_partition_entity')
        avro_model_output_dir = tempfile.mkdtemp()
        raw_params.extend(
            ['--' + constants.MODEL_OUTPUT_DIR, avro_model_output_dir])

        # Create random effect LR LBFGS Model
        re_lr_model = RandomEffectLRLBFGSModel(raw_model_params=raw_params)
        assert re_lr_model

        checkpoint_dir = tempfile.mkdtemp()
        training_context = {
            constants.PARTITION_INDEX: 0,
            constants.PASSIVE_TRAINING_DATA_PATH: test_dataset_path
        }
        schema_params = setup_fake_schema_params()

        # Training should fail because the partition entity doesn't exist in the dataset
        with self.assertRaises(Exception):
            re_lr_model.train(training_data_path=test_dataset_path,
                              validation_data_path=test_dataset_path,
                              metadata_file=os.path.join(
                                  test_dataset_path, "data.json"),
                              checkpoint_path=checkpoint_dir,
                              execution_context=training_context,
                              schema_params=schema_params)
        tf.io.gfile.rmtree(checkpoint_dir)
        tf.io.gfile.rmtree(avro_model_output_dir)
Example #3
def _get_params(paths, max_iters, intercept_only):
    """
    Get the various parameters for model initialization.
    :param paths: An AllPaths namedtuple.
    :param max_iters: maximum number of L-BFGS iterations.
    :param intercept_only: whether the model has an intercept only, with no other features.
    :return: Three different parameter sets.
    """
    base_training_params = setup_fake_base_training_params(
        training_stage=constants.FIXED_EFFECT)
    base_training_params.training_score_dir = paths.training_score_dir
    base_training_params.validation_score_dir = paths.validation_score_dir

    schema_params = setup_fake_schema_params()

    raw_model_params = [
        '--' + constants.TRAINING_DATA_DIR, paths.training_data_dir,
        '--' + constants.VALIDATION_DATA_DIR, paths.validation_data_dir,
        '--' + constants.METADATA_FILE, paths.metadata_file,
        '--' + constants.NUM_OF_LBFGS_ITERATIONS, f"{max_iters}",
        '--' + constants.OUTPUT_MODEL_DIR, paths.output_model_dir,
        '--' + constants.COPY_TO_LOCAL, 'False',
        '--' + constants.BATCH_SIZE, '16',
        '--' + constants.L2_REG_WEIGHT, f"{_L2_REG_WEIGHT}",
        '--' + constants.REGULARIZE_BIAS, 'True',
        '--' + constants.DELAYED_EXIT_IN_SECONDS, '1'
    ]
    if not intercept_only:
        raw_model_params.extend([
            '--' + constants.FEATURE_BAG, 'global',
            '--' + constants.FEATURE_FILE, paths.feature_file
        ])
    return base_training_params, schema_params, raw_model_params
Example #4
def _get_params(paths, max_iters, intercept_only, has_validation_data_dir=True,
                disable_fixed_effect_scoring_after_training=False, has_intercept=True,
                model_type=constants.LOGISTIC_REGRESSION,
                fixed_effect_variance_mode=None, l2_reg_weight=_L2_REG_WEIGHT):
    """
    Get the various parameters for model initialization.
    :param paths: An AllPaths namedtuple.
    :param max_iters: maximum number of L-BFGS iterations.
    :param intercept_only: whether the model has an intercept only, with no other features.
    :param has_validation_data_dir: whether to use validation data.
    :param disable_fixed_effect_scoring_after_training: whether to disable scoring after training.
    :param has_intercept: whether to include an intercept in the model.
    :param model_type: the type of linear model to use (e.g., "linear_regression", "logistic_regression").
    :param fixed_effect_variance_mode: fixed effect variance mode; supported values are None, "FULL" and "SIMPLE".
    :param l2_reg_weight: L2 regularization weight.
    :return: Three different parameter sets.
    """
    base_training_params = setup_fake_base_training_params(training_stage=constants.FIXED_EFFECT,
                                                           model_type=model_type)
    base_training_params.training_score_dir = paths.training_score_dir
    base_training_params.validation_score_dir = paths.validation_score_dir

    schema_params = setup_fake_schema_params()

    raw_model_params = ['--' + constants.TRAINING_DATA_DIR, paths.training_data_dir,
                        '--' + constants.METADATA_FILE, paths.metadata_file,
                        '--' + constants.NUM_OF_LBFGS_ITERATIONS, f"{max_iters}",
                        '--' + constants.OUTPUT_MODEL_DIR, paths.output_model_dir,
                        '--' + constants.COPY_TO_LOCAL, 'False',
                        '--' + constants.BATCH_SIZE, '16',
                        '--' + constants.L2_REG_WEIGHT, f"{l2_reg_weight}",
                        "--" + constants.REGULARIZE_BIAS, 'True',
                        "--" + constants.DELAYED_EXIT_IN_SECONDS, '1']

    if has_validation_data_dir:
        raw_model_params.extend(['--' + constants.VALIDATION_DATA_DIR, paths.validation_data_dir])

    if disable_fixed_effect_scoring_after_training:
        raw_model_params.extend(['--disable_fixed_effect_scoring_after_training', 'True'])

    if not intercept_only:
        raw_model_params.extend(['--' + constants.FEATURE_BAG, 'global',
                                 '--' + constants.FEATURE_FILE, paths.feature_file])
    if has_intercept:
        raw_model_params.extend(['--has_intercept', 'True'])
    else:
        raw_model_params.extend(['--has_intercept', 'False', '--regularize_bias', 'False'])

    if fixed_effect_variance_mode is not None:
        raw_model_params.extend(['--fixed_effect_variance_mode', fixed_effect_variance_mode])

    return base_training_params, schema_params, raw_model_params
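
# A hedged usage sketch for _get_params above. AllPaths is only described as
# a namedtuple in the docstring; the fields below are exactly the ones the
# function dereferences, and all directory values are hypothetical.
# (The call itself relies on the setup_fake_* helpers imported by the test module.)
from collections import namedtuple

AllPaths = namedtuple('AllPaths', [
    'training_data_dir', 'validation_data_dir', 'metadata_file',
    'feature_file', 'output_model_dir', 'training_score_dir',
    'validation_score_dir'
])

example_paths = AllPaths(
    training_data_dir='/tmp/train', validation_data_dir='/tmp/validation',
    metadata_file='/tmp/train/data.json', feature_file='/tmp/features.csv',
    output_model_dir='/tmp/model', training_score_dir='/tmp/train_scores',
    validation_score_dir='/tmp/validation_scores')
base_training_params, schema_params, raw_model_params = _get_params(
    example_paths, max_iters=10, intercept_only=False)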
Example #5
    def _get_params(self):
        base_training_params = setup_fake_base_training_params(
            training_stage=constants.FIXED_EFFECT)
        base_training_params[
            constants.TRAINING_OUTPUT_DIR] = self.training_score_dir
        base_training_params[
            constants.VALIDATION_OUTPUT_DIR] = self.validation_score_dir

        schema_params = setup_fake_schema_params()

        raw_model_params = [
            '--' + constants.FEATURE_BAGS,
            'global',
            '--' + constants.TRAIN_DATA_PATH,
            self.train_data_path,
            '--' + constants.VALIDATION_DATA_PATH,
            self.validation_data_path,
            '--' + constants.METADATA_FILE,
            self.metadata_file,
            '--' + constants.FEATURE_FILE,
            self.feature_file,
            '--' + constants.NUM_OF_LBFGS_ITERATIONS,
            '1',
            '--' + constants.MODEL_OUTPUT_DIR,
            self.model_output_dir,
            '--' + constants.COPY_TO_LOCAL,
            'False',
            # Batch size > number of samples to make sure
            # there is no shuffling of data among batches
            '--' + constants.BATCH_SIZE,
            '64',
            '--' + constants.L2_REG_WEIGHT,
            '0.01',
            "--" + constants.REGULARIZE_BIAS,
            'True'
        ]
        return base_training_params, schema_params, raw_model_params
Example #6
    def test_train_and_predict(self):

        # Create and add AVRO model output directory to raw parameters
        base_training_params, raw_params = self.get_raw_params()
        avro_model_output_dir = tempfile.mkdtemp()
        raw_params.extend(
            ['--' + constants.MODEL_OUTPUT_DIR, avro_model_output_dir])
        raw_params.extend(['--' + constants.ENABLE_LOCAL_INDEXING, 'True'])

        # Create random effect LR LBFGS Model
        re_lr_model = RandomEffectLRLBFGSModel(raw_model_params=raw_params)
        assert re_lr_model

        # TEST 1 - Training (with scoring)
        checkpoint_dir = tempfile.mkdtemp()
        active_train_fd, active_train_output_file = tempfile.mkstemp()
        passive_train_fd, passive_train_output_file = tempfile.mkstemp()
        training_context = {
            constants.ACTIVE_TRAINING_OUTPUT_FILE: active_train_output_file,
            constants.PASSIVE_TRAINING_OUTPUT_FILE: passive_train_output_file,
            constants.PARTITION_INDEX: 0,
            constants.PASSIVE_TRAINING_DATA_PATH: test_dataset_path
        }
        schema_params = setup_fake_schema_params()
        re_lr_model.train(training_data_path=test_dataset_path,
                          validation_data_path=test_dataset_path,
                          metadata_file=os.path.join(test_dataset_path,
                                                     "data.json"),
                          checkpoint_path=checkpoint_dir,
                          execution_context=training_context,
                          schema_params=schema_params)

        # Cycle through model AVRO output and assert each record is a dictionary
        with open(os.path.join(avro_model_output_dir, f"part-{0:05d}.avro"),
                  'rb') as fo:
            for record in reader(fo):
                self.assertTrue(isinstance(record, dict))

        # Cycle through output file and assert each record is a dictionary
        with open(active_train_output_file, 'rb') as fo:
            for record in reader(fo):
                self.assertTrue(isinstance(record, dict))
        with open(passive_train_output_file, 'rb') as fo:
            for record in reader(fo):
                self.assertTrue(isinstance(record, dict))

        # TEST 2 - Cold prediction
        predict_output_dir = tempfile.mkdtemp()
        re_lr_model.predict(output_dir=predict_output_dir,
                            input_data_path=test_dataset_path,
                            metadata_file=os.path.join(test_dataset_path,
                                                       "data.json"),
                            checkpoint_path=avro_model_output_dir,
                            execution_context=training_context,
                            schema_params=schema_params)
        with open(os.path.join(predict_output_dir,
                               f"part-{0:05d}.avro"), 'rb') as fo:
            for record in reader(fo):
                self.assertTrue(isinstance(record, dict))

        # TEST 3 - Assert scoring-while-training and cold prediction produce same output
        with open(active_train_output_file, 'rb') as fo:
            active_training_records = list(reader(fo))
        with open(os.path.join(predict_output_dir,
                               f"part-{0:05d}.avro"), 'rb') as fo:
            prediction_records = list(reader(fo))
        for active_training_record, prediction_record in zip(
                active_training_records, prediction_records):
            self.assertEqual(active_training_record, prediction_record)

        # remove the temp dir(s) and file(s).
        os.close(active_train_fd)
        tf.io.gfile.remove(active_train_output_file)
        os.close(passive_train_fd)
        tf.io.gfile.remove(passive_train_output_file)
        tf.io.gfile.rmtree(avro_model_output_dir)
        tf.io.gfile.rmtree(checkpoint_dir)
        tf.io.gfile.rmtree(predict_output_dir)
Example #7
    def test_warm_start(self):

        # Step 1: train an initial model
        # Create and add AVRO model output directory to raw parameters
        base_training_params, raw_params = self.get_raw_params()
        avro_model_output_dir = tempfile.mkdtemp()
        raw_params.extend(
            ['--' + constants.MODEL_OUTPUT_DIR, avro_model_output_dir])

        # Create random effect LR LBFGS Model
        re_lr_model = RandomEffectLRLBFGSModel(raw_model_params=raw_params)

        # Initial training to get the warm start model
        checkpoint_dir = tempfile.mkdtemp()
        active_train_fd, active_train_output_file = tempfile.mkstemp()
        passive_train_fd, passive_train_output_file = tempfile.mkstemp()
        training_context = {
            constants.ACTIVE_TRAINING_OUTPUT_FILE: active_train_output_file,
            constants.PASSIVE_TRAINING_OUTPUT_FILE: passive_train_output_file,
            constants.PARTITION_INDEX: 0,
            constants.PASSIVE_TRAINING_DATA_PATH: test_dataset_path
        }
        schema_params = setup_fake_schema_params()
        re_lr_model.train(training_data_path=test_dataset_path,
                          validation_data_path=test_dataset_path,
                          metadata_file=os.path.join(test_dataset_path,
                                                     "data.json"),
                          checkpoint_path=checkpoint_dir,
                          execution_context=training_context,
                          schema_params=schema_params)

        avro_model_output_file = os.path.join(avro_model_output_dir,
                                              f"part-{0:05d}.avro")
        # Read back the model as the warm start initial point.
        initial_model = re_lr_model._load_weights(avro_model_output_file, 0)

        # Step 2: Train for 1 l-bfgs step with warm start
        base_training_params, raw_params = self.get_raw_params('memberId', 1)
        raw_params.extend(
            ['--' + constants.MODEL_OUTPUT_DIR, avro_model_output_dir])

        # Create random effect LR LBFGS Model
        re_lr_model = RandomEffectLRLBFGSModel(raw_model_params=raw_params)

        schema_params = setup_fake_schema_params()
        re_lr_model.train(training_data_path=test_dataset_path,
                          validation_data_path=test_dataset_path,
                          metadata_file=os.path.join(test_dataset_path,
                                                     "data.json"),
                          checkpoint_path=checkpoint_dir,
                          execution_context=training_context,
                          schema_params=schema_params)
        final_model = re_lr_model._load_weights(avro_model_output_file, 0)

        # Check the model has already converged.
        self.assertEqual(len(initial_model), len(final_model))
        for model_id in initial_model:
            self.assertAllClose(initial_model[model_id].theta,
                                final_model[model_id].theta,
                                msg='models mismatch')

        # Step 3: Train for 1 l-bfgs step with cold start
        # Remove the model file to stop the warm start
        model_files = tf.io.gfile.glob(
            os.path.join(avro_model_output_dir, '*.avro'))
        for f in model_files:
            tf.io.gfile.remove(f)
        # Train for 1 l-bfgs step.
        re_lr_model.train(training_data_path=test_dataset_path,
                          validation_data_path=test_dataset_path,
                          metadata_file=os.path.join(test_dataset_path,
                                                     "data.json"),
                          checkpoint_path=checkpoint_dir,
                          execution_context=training_context,
                          schema_params=schema_params)
        cold_model = re_lr_model._load_weights(avro_model_output_file, 0)

        # Check that the cold-start model differs from the converged warm-start model.
        self.assertEqual(len(cold_model), len(final_model))
        for model_id in cold_model:
            self.assertNotAllClose(cold_model[model_id].theta,
                                   final_model[model_id].theta,
                                   msg='models should not be close')

        # remove the temp dir(s) and file(s).
        os.close(active_train_fd)
        tf.io.gfile.remove(active_train_output_file)
        os.close(passive_train_fd)
        tf.io.gfile.remove(passive_train_output_file)
        tf.io.gfile.rmtree(avro_model_output_dir)
        tf.io.gfile.rmtree(checkpoint_dir)
Example #8
    def test_model_with_variance(self):
        dataset_idx = 1
        variance_mode = constants.FULL
        # Create training data
        raw_params = self.get_raw_params()
        avro_model_output_dir = tempfile.mkdtemp(dir=self.base_dir)
        raw_params.extend(
            ['--' + constants.OUTPUT_MODEL_DIR, avro_model_output_dir])
        raw_params.extend(['--' + constants.ENABLE_LOCAL_INDEXING, 'True'])
        raw_params.extend(['--random_effect_variance_mode', variance_mode])

        # Replace the feature file
        feature_idx = raw_params.index('--' + constants.FEATURE_FILE)
        del raw_params[feature_idx:feature_idx + 2]
        raw_params.extend([
            '--' + constants.FEATURE_FILE,
            os.path.join(test_dataset_path, 'dataset_1_feature_file.csv')
        ])

        # For this test, we need to set the L2 weight to 0.0. See the comments in test_optimizer_helper.
        l2_idx = raw_params.index('--' + constants.L2_REG_WEIGHT)
        del raw_params[l2_idx:l2_idx + 2]
        raw_params.extend(['--' + constants.L2_REG_WEIGHT, '0.0'])

        train_tfrecord_dir = tempfile.mkdtemp(dir=self.base_dir)
        train_tfrecord_file = os.path.join(train_tfrecord_dir,
                                           'train.tfrecord')
        # Create dataset with string entity id
        model_ids = self._create_dataset_with_string_entity_id(
            dataset_idx, train_tfrecord_file)
        train_data_dir = train_tfrecord_dir
        # set up metadata file
        metadata_file = os.path.join(test_dataset_path, "dataset_1.json")

        # Create random effect LR LBFGS Model
        trainer = RandomEffectLRLBFGSModel(raw_model_params=raw_params)

        # train the model
        checkpoint_dir = tempfile.mkdtemp(dir=self.base_dir)
        active_train_fd, active_train_output_file = tempfile.mkstemp(
            dir=self.base_dir)
        passive_train_fd, passive_train_output_file = tempfile.mkstemp(
            dir=self.base_dir)
        training_context = {
            constants.ACTIVE_TRAINING_OUTPUT_FILE: active_train_output_file,
            constants.PASSIVE_TRAINING_OUTPUT_FILE: passive_train_output_file,
            constants.PARTITION_INDEX: 0,
            constants.PASSIVE_TRAINING_DATA_DIR: train_data_dir
        }
        schema_params = setup_fake_schema_params()
        trainer.train(training_data_dir=train_data_dir,
                      validation_data_dir=train_data_dir,
                      metadata_file=metadata_file,
                      checkpoint_path=checkpoint_dir,
                      execution_context=training_context,
                      schema_params=schema_params)
        avro_model_output_file = os.path.join(avro_model_output_dir,
                                              f"part-{0:05d}.avro")

        # Read back the trained model to compare against the expected values.
        model = trainer._load_weights(avro_model_output_file, False)
        actual_mean = model[model_ids[0]].theta
        actual_variance = model[model_ids[0]].variance

        # Get expected model coefficients and variance
        dataset = self._create_dataset(dataset_idx)
        offsets = dataset['offsets'][0]
        y = dataset['responses'][0]
        weights = dataset['weights'][0]
        per_member_indices = dataset['per_member_indices'][0]
        per_member_values = dataset['per_member_values'][0]
        # Convert per-member features to COO matrix
        rows = []
        cols = []
        vals = []
        nrows = len(per_member_indices)
        for ridx in range(len(per_member_indices)):
            for cidx in range(len(per_member_indices[ridx])):
                rows.append(ridx)
                cols.append(per_member_indices[ridx][cidx])
                vals.append(per_member_values[ridx][cidx])
        X = coo_matrix((vals, (rows, cols)), shape=(nrows, 3))
        expected = compute_coefficients_and_variance(
            X=X,
            y=y,
            weights=weights,
            offsets=offsets,
            variance_mode=variance_mode)
        # Compare
        self.assertAllClose(expected[0],
                            actual_mean,
                            rtol=1e-04,
                            atol=1e-04,
                            msg='Mean mismatch')
        self.assertAllClose(expected[1],
                            actual_variance,
                            rtol=1e-04,
                            atol=1e-04,
                            msg='Variance mismatch')
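
# Toy illustration (standard scipy.sparse API, not project code) of the
# coo_matrix constructor used above: (vals, (rows, cols)) places vals[k] at
# position (rows[k], cols[k]) in the resulting matrix.
from scipy.sparse import coo_matrix

X_demo = coo_matrix(([1.0, 2.0, 3.0], ([0, 0, 1], [0, 2, 1])), shape=(2, 3))
print(X_demo.toarray())
# [[1. 0. 2.]
#  [0. 3. 0.]]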
Example #9
    def _run_warm_start(self,
                        string_entity_id,
                        intercept_only,
                        enable_local_index,
                        has_intercept=True):

        assert has_intercept or not intercept_only

        # Step 1: train an initial model
        # Create and add AVRO model output directory to raw parameters
        raw_params = self.get_raw_params(intercept_only=intercept_only,
                                         has_intercept=has_intercept)
        avro_model_output_dir = tempfile.mkdtemp(dir=self.base_dir)
        raw_params.extend(
            ['--' + constants.OUTPUT_MODEL_DIR, avro_model_output_dir])
        if enable_local_index:
            raw_params.extend(['--' + constants.ENABLE_LOCAL_INDEXING, 'True'])

        train_data_dir = test_dataset_path
        if string_entity_id:
            train_tfrecord_dir = tempfile.mkdtemp(dir=self.base_dir)
            train_tfrecord_file = os.path.join(train_tfrecord_dir,
                                               'train.tfrecord')
            # create dataset with string entity id
            model_ids = self._create_dataset_with_string_entity_id(
                2, train_tfrecord_file)
            train_data_dir = train_tfrecord_dir
            # set up metadata file
            metadata_file = os.path.join(test_dataset_path,
                                         "data_with_string_entity_id.json")
        elif intercept_only:
            metadata_file = os.path.join(train_data_dir,
                                         "data_intercept_only.json")
        else:
            metadata_file = os.path.join(train_data_dir, "data.json")

        # Create random effect LR LBFGS Model
        re_lr_model = RandomEffectLRLBFGSModel(raw_model_params=raw_params)

        # Initial training to get the warm start model
        checkpoint_dir = tempfile.mkdtemp(dir=self.base_dir)
        active_train_fd, active_train_output_file = tempfile.mkstemp(
            dir=self.base_dir)
        passive_train_fd, passive_train_output_file = tempfile.mkstemp(
            dir=self.base_dir)
        training_context = {
            constants.ACTIVE_TRAINING_OUTPUT_FILE: active_train_output_file,
            constants.PASSIVE_TRAINING_OUTPUT_FILE: passive_train_output_file,
            constants.PARTITION_INDEX: 0,
            constants.PASSIVE_TRAINING_DATA_DIR: train_data_dir
        }
        schema_params = setup_fake_schema_params()
        re_lr_model.train(training_data_dir=train_data_dir,
                          validation_data_dir=train_data_dir,
                          metadata_file=metadata_file,
                          checkpoint_path=checkpoint_dir,
                          execution_context=training_context,
                          schema_params=schema_params)

        avro_model_output_file = os.path.join(avro_model_output_dir,
                                              f"part-{0:05d}.avro")
        # Read back the model as the warm start initial point.
        initial_model = re_lr_model._load_weights(avro_model_output_file,
                                                  False)
        if intercept_only:
            self._check_intercept_only_model(initial_model)

        # Step 2: Train for 1 l-bfgs step with warm start
        raw_params = self.get_raw_params('memberId', 1, intercept_only,
                                         has_intercept)
        raw_params.extend(
            ['--' + constants.OUTPUT_MODEL_DIR, avro_model_output_dir])
        if enable_local_index:
            raw_params.extend(['--' + constants.ENABLE_LOCAL_INDEXING, 'True'])

        # Create random effect LR LBFGS Model
        re_lr_model = RandomEffectLRLBFGSModel(raw_model_params=raw_params)

        schema_params = setup_fake_schema_params()
        re_lr_model.train(training_data_dir=train_data_dir,
                          validation_data_dir=train_data_dir,
                          metadata_file=metadata_file,
                          checkpoint_path=checkpoint_dir,
                          execution_context=training_context,
                          schema_params=schema_params)
        final_model = re_lr_model._load_weights(avro_model_output_file, False)

        if intercept_only:
            self._check_intercept_only_model(final_model)
        # Check the model has already converged.
        self.assertEqual(len(initial_model), len(final_model))
        for model_id in initial_model:
            if string_entity_id:
                # make sure the model id is a string, not bytes.
                self.assertTrue(model_id in model_ids)
            self.assertAllClose(initial_model[model_id].theta,
                                final_model[model_id].theta,
                                msg='models mismatch')

        # Step 3: Train for 1 l-bfgs step with cold start
        # Remove the model file to stop the warm start
        model_files = low_rpc_call_glob(
            os.path.join(avro_model_output_dir, '*.avro'))
        for f in model_files:
            tf.io.gfile.remove(f)
        # Train for 1 l-bfgs step.
        re_lr_model.train(training_data_dir=train_data_dir,
                          validation_data_dir=train_data_dir,
                          metadata_file=metadata_file,
                          checkpoint_path=checkpoint_dir,
                          execution_context=training_context,
                          schema_params=schema_params)
        cold_model = re_lr_model._load_weights(avro_model_output_file, False)

        if intercept_only:
            self._check_intercept_only_model(cold_model)

        # Check the models are different.
        self.assertEqual(len(cold_model), len(final_model))
        for model_id in cold_model:
            if string_entity_id:
                # make sure the model id is a string, not bytes.
                self.assertTrue(model_id in model_ids)
            self.assertNotAllClose(cold_model[model_id].theta,
                                   final_model[model_id].theta,
                                   msg='models should not be close')
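
    # Hypothetical callers (not in the source) sketching how _run_warm_start
    # might be exercised across its parameter grid; the method names are
    # illustrative only.
    def test_warm_start_int_entity_id(self):
        self._run_warm_start(string_entity_id=False, intercept_only=False,
                             enable_local_index=False)

    def test_warm_start_intercept_only_local_index(self):
        self._run_warm_start(string_entity_id=False, intercept_only=True,
                             enable_local_index=True)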