def __init__(self, model_params=None, json_path=None):
    super().__init__()
    # Resolve parameters in two passes: merge the caller's dict with the
    # defaults, then apply any overrides from the optional JSON file.
    self.model_params = get_from_dicts(model_params, default_parameters)
    self.model_params = get_from_json(json_path, self.model_params)
    self._sanity_checks()
    logging.info("Model parameters : %s", self.model_params)
    self.input_type = self.model_params['input']['type']
    self.model_dir = self.model_params['output']['save_model_dir']
    # Build the session/run configuration, then the underlying Estimator.
    self.config = get_tf_config(self.model_params)
    self.model = tf.estimator.Estimator(
        model_fn=self.model_fn,
        model_dir=self.model_dir,
        params=self.model_params,
        config=self.config)
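# A minimal usage sketch of the constructor above. `Model`, `params.json`,
# and `train_input_fn` are hypothetical names, not taken from the source;
# the point is only that the merged parameters drive a standard Estimator.
model = Model(
    model_params={'training': {'mode': 'local', 'log_steps': 10}},
    json_path='params.json')  # JSON overrides are applied last
model.model.train(input_fn=train_input_fn)  # standard Estimator interface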
def test_get_tf_config():
    params = {'training': {'mode': 'test'}}
    with pytest.raises(ValueError, match="mode should be local or distributed"):
        get_tf_config(params)

    # conf for local training
    params.update({
        'training': {'mode': 'local', 'log_steps': 10},
        'resource': {'num_cpu': 4, 'num_thread': 4, 'num_gpu': 1}
    })
    get_tf_config(params)

    # conf for distributed training
    params.update({
        'training': {'mode': 'distributed', 'log_steps': 10},
        'resource': {'num_cpu': 4, 'num_thread': 4, 'num_gpu': 2}
    })
    get_tf_config(params)
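# For context, a sketch of what get_tf_config might look like, reconstructed
# only from what the test above exercises (the ValueError message and the
# 'training'/'resource' keys). The thread and device mapping below is an
# assumption, not the project's actual implementation, hence the distinct
# name `get_tf_config_sketch`.
import tensorflow as tf


def get_tf_config_sketch(params):
    mode = params['training']['mode']
    if mode not in ('local', 'distributed'):
        raise ValueError("mode should be local or distributed")
    resource = params['resource']
    # Cap the session's CPU threading and expose the requested devices.
    session_config = tf.ConfigProto(
        intra_op_parallelism_threads=resource['num_thread'],
        inter_op_parallelism_threads=resource['num_thread'],
        device_count={'CPU': resource['num_cpu'], 'GPU': resource['num_gpu']})
    return tf.estimator.RunConfig(
        session_config=session_config,
        log_step_count_steps=params['training']['log_steps'])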
def test_get_tf_config(): params = {"training": {"mode": "test"}} with pytest.raises(ValueError, match="mode should be local or distributed"): get_tf_config(params) # conf for local training params.update( { "training": {"mode": "local", "log_steps": 10}, "resource": {"num_cpu": 4, "num_thread": 4, "num_gpu": 1}, } ) get_tf_config(params) # conf for distributed training params.update( { "training": {"mode": "distributed", "log_steps": 10}, "resource": {"num_cpu": 4, "num_thread": 4, "num_gpu": 2}, } ) get_tf_config(params)