def __init__(self, mode, encoder_fn, decoder_fn, bridge_fn, loss=None,
             optimizer=None, summaries='all', metrics=None,
             clip_gradients=0.5, clip_embed_gradients=0.1, name="Generator"):
    """Construct a Generator model from encoder/decoder/bridge callables.

    Args:
        mode: the running mode passed through to the base model.
        encoder_fn: callable building the encoder subgraph (validated below).
        decoder_fn: callable building the decoder subgraph (validated below).
        bridge_fn: callable connecting encoder to decoder (validated below).
        loss: loss config; defaults to `SigmoidCrossEntropyConfig()`.
        optimizer: optimizer config; defaults to
            `AdadeltaConfig(learning_rate=0.4)`.
        summaries: which summaries to build (default 'all').
        metrics: metric configs forwarded to the base model.
        clip_gradients: gradient clipping value forwarded to the base model.
        clip_embed_gradients: embedding gradient clipping value.
        name: model name (default "Generator").
    """
    # Validate the user-supplied callables before storing anything.
    self._check_subgraph_fn(function=encoder_fn, function_name='encoder_fn')
    self._check_subgraph_fn(function=decoder_fn, function_name='decoder_fn')
    self._check_bridge_fn(function=bridge_fn)

    # NOTE(review): attribute name is `_encode_fn`, not `_encoder_fn`,
    # which is inconsistent with `_decoder_fn`; kept as-is since
    # `_build_graph_fn` (not visible here) may read it — confirm before renaming.
    self._encode_fn = encoder_fn
    self._decoder_fn = decoder_fn
    self._bridge_fn = bridge_fn

    # Build the graph function before delegating to the base constructor.
    graph_fn = self._build_graph_fn()
    super(Generator, self).__init__(
        mode=mode,
        name=name,
        model_type=self.Types.GENERATOR,
        graph_fn=graph_fn,
        loss=loss or SigmoidCrossEntropyConfig(),
        optimizer=optimizer or AdadeltaConfig(learning_rate=0.4),
        metrics=metrics,
        summaries=summaries,
        clip_gradients=clip_gradients,
        clip_embed_gradients=clip_embed_gradients)
def model_fn(features, labels, params, mode, config):
    """Estimator-style model_fn that delegates to a polyaxon Generator.

    Builds a `plx.models.Generator` from the enclosing-scope
    `encoder_fn` / `decoder_fn` / `bridge_fn` callables and invokes it
    with the estimator-provided arguments.
    """
    generator = plx.models.Generator(
        mode=mode,
        encoder_fn=encoder_fn,
        decoder_fn=decoder_fn,
        bridge_fn=bridge_fn,
        loss=MeanSquaredErrorConfig(),
        optimizer=AdadeltaConfig(learning_rate=0.9),
        summaries=['loss', 'image_input', 'image_result'])
    return generator(features=features, labels=labels,
                     params=params, config=config)
def test_return_estimator_spec(self):
    """Calling the model should return a tf EstimatorSpec."""
    inputs = {'x': tf.placeholder(tf.float32, [2, 89])}
    targets = tf.constant([[1], [1]])
    model = BaseModel(
        plx.Modes.TRAIN,
        graph_fn=self.get_dummy_graph_fn(),
        loss=LogLossConfig(),
        optimizer=AdadeltaConfig(),
        model_type=BaseModel.Types.CLASSIFIER,
        metrics=[],
        summaries=['learning_rate'],
        name='test')
    spec = model(inputs, targets, None, None)
    assert isinstance(spec, EstimatorSpec)
def test_adadelta_config(self):
    """AdadeltaConfig must round-trip through from_dict/to_dict unchanged."""
    expected = {
        'learning_rate': 0.001,
        'epsilon': 1e-08,
        'rho': 0.95,
        'decay_type': "",
        'decay_rate': 0.,
        'decay_steps': 100,
        'start_decay_at': 0,
        'stop_decay_at': 1e10,
        'min_learning_rate': 1e-12,
        'staircase': False,
        'global_step': None,
        'use_locking': False,
        'name': 'optimizer',
    }
    config = AdadeltaConfig.from_dict(expected)
    assert_equal_dict(config.to_dict(), expected)
def test_handle_predict_mode(self):
    """In PREDICT mode the spec has predictions but no loss or train op."""
    inputs = {'x': tf.placeholder(tf.float32, [2, 89])}
    targets = tf.constant([[1], [1]])
    model = BaseModel(
        plx.Modes.PREDICT,
        graph_fn=self.get_dummy_graph_fn(),
        loss=LogLossConfig(),
        optimizer=AdadeltaConfig(),
        model_type=BaseModel.Types.CLASSIFIER,
        metrics=[],
        summaries=['learning_rate'],
        name='test')
    spec = model(inputs, targets, None, None)
    assert spec.loss is None
    assert spec.predictions is not None
    # Predict-mode output must not leak training losses.
    assert 'losses' not in spec.predictions
    assert spec.train_op is None
def test_build_no_summaries(self):
    """With summaries=[] no summary ops should be tracked at all."""
    inputs = {'x': tf.placeholder(tf.float32, [2, 89])}
    targets = tf.constant([[1], [1]])
    model = BaseModel(
        plx.Modes.TRAIN,
        graph_fn=self.get_dummy_graph_fn(),
        loss=LogLossConfig(),
        optimizer=AdadeltaConfig(),
        model_type=BaseModel.Types.CLASSIFIER,
        metrics=[],
        summaries=[],
        name='test')
    model(inputs, targets, None, None)
    # The tracked-summaries collection must stay empty.
    tracked = get_tracked(collection=tf.GraphKeys.SUMMARIES_BY_NAMES)
    assert tracked == {}
def test_does_not_build_learning_rate_summaries_if_no_decay(self):
    """Requesting lr summaries without a decay schedule yields none."""
    inputs = {'x': tf.placeholder(tf.float32, [2, 89])}
    targets = tf.constant([[1], [1]])
    model = BaseModel(
        plx.Modes.TRAIN,
        graph_fn=self.get_dummy_graph_fn(),
        loss=LogLossConfig(),
        optimizer=AdadeltaConfig(),
        model_type=BaseModel.Types.CLASSIFIER,
        metrics=[],
        summaries=['learning_rate'],
        name='test')
    model(inputs, targets, None, None)
    # No decay configured on the optimizer, so no lr summary is created.
    tracked_names = list(
        get_tracked(collection=tf.GraphKeys.SUMMARIES_BY_NAMES).keys())
    assert len(tracked_names) == 0
def test_build_variables_summaries(self):
    """summaries=['variables'] tracks exactly the trainable variables."""
    inputs = {'x': tf.placeholder(tf.float32, [2, 89])}
    targets = tf.constant([[1], [1]])
    model = BaseModel(
        plx.Modes.TRAIN,
        graph_fn=self.get_dummy_graph_fn(),
        loss=LogLossConfig(),
        optimizer=AdadeltaConfig(),
        model_type=BaseModel.Types.CLASSIFIER,
        metrics=[],
        summaries=['variables'],
        name='test')
    model(inputs, targets, None, None)
    # One summary per trainable variable, keyed by the variable op name.
    trainable_names = {var.op.name for var in tf.trainable_variables()}
    tracked_names = set(
        get_tracked(collection=tf.GraphKeys.SUMMARIES_BY_NAMES).keys())
    assert trainable_names == tracked_names
def test_build_all_summaries(self):
    """summaries='all' plus a decay schedule builds every summary family."""
    training.create_global_step()
    inputs = {'x': tf.placeholder(tf.float32, [2, 89])}
    targets = tf.constant([[1], [1]])
    model = BaseModel(
        plx.Modes.TRAIN,
        graph_fn=self.get_dummy_graph_fn(),
        loss=LogLossConfig(),
        optimizer=AdadeltaConfig(decay_type='exponential_decay'),
        model_type=BaseModel.Types.CLASSIFIER,
        metrics=[],
        summaries='all',
        name='test')
    model(inputs, targets, None, None)

    # Bucket tracked summary names by family and require each non-empty.
    counts = {'learning_rate': 0, 'activations': 0, 'gradients': 0, 'loss': 0}
    tracked = get_tracked(collection=tf.GraphKeys.SUMMARIES_BY_NAMES)
    for summary_name in tracked.keys():
        if 'learning_rate' in summary_name:
            counts['learning_rate'] += 1
        elif 'Activation' in summary_name:
            counts['activations'] += 1
        elif 'Loss' in summary_name:
            counts['loss'] += 1
        elif 'Gradient' in summary_name:
            counts['gradients'] += 1
    assert counts['learning_rate'] > 0
    assert counts['activations'] > 0
    assert counts['gradients'] > 0
    assert counts['loss'] > 0