Example #1
def model_fn(features, labels, params, mode, config):
    model = plx.models.Classifier(mode=mode,
                                  graph_fn=graph_fn,
                                  loss=SigmoidCrossEntropyConfig(),
                                  optimizer=AdamConfig(learning_rate=0.001),
                                  metrics=[AccuracyConfig()],
                                  summaries=['loss'],
                                  one_hot_encode=True,
                                  n_classes=10)
    return model(features=features,
                 labels=labels,
                 params=params,
                 config=config)
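Note: graph_fn is assumed to be defined elsewhere in the source module. As a rough sketch of what it might look like, here is a minimal graph function using plain TensorFlow 1.x layers; the (mode, features) signature and the 'image' feature key are assumptions, not confirmed by this snippet:

import tensorflow as tf

def graph_fn(mode, features):
    # Flatten the input image and project it to 10 logits,
    # matching n_classes=10 above.
    x = tf.layers.flatten(features['image'])
    return tf.layers.dense(x, units=10)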
Example #2
def model_fn(features, labels, params, mode, config):
    model = plx.models.Classifier(
        mode=mode,
        graph_fn=graph_fn,
        loss=SigmoidCrossEntropyConfig(),
        optimizer=AdamConfig(
            learning_rate=0.007, decay_type='exponential_decay', decay_rate=0.1),
        metrics=[
            AccuracyConfig(),
            PrecisionConfig()
        ],
        summaries='all',
        one_hot_encode=True,
        n_classes=10)
    return model(features=features, labels=labels, params=params, config=config)
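This variant adds an exponential learning-rate decay to Adam. For orientation, TensorFlow-style exponential decay computes lr * decay_rate ** (step / decay_steps); a minimal plain-Python sketch, assuming the decay_steps default of 100 seen in the AdamConfig dict in Example #5 below:

def exponential_decay(lr0, step, decay_rate=0.1, decay_steps=100, staircase=False):
    # With staircase=True the exponent is truncated to an integer,
    # so the rate drops in discrete steps instead of continuously.
    exponent = step // decay_steps if staircase else step / decay_steps
    return lr0 * decay_rate ** exponent

print(exponential_decay(0.007, 50))  # ~0.0022, halfway through the first decay period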
Example #3
def test_classifier_model_config(self):
    config_dict = {
        'graph': {
            'input_layers': ['image'],
            'output_layers': ['dense_0'],
            'layers': [
                {'Conv2D': {'filters': 64,
                            'strides': [1, 1],
                            'kernel_size': [2, 2],
                            'activation': 'relu',
                            'name': 'convolution_1'}},
                {'Dense': {'units': 17, 'name': 'dense_0'}}
            ]
        },
        'one_hot_encode': True,
        'n_classes': 10,
        'loss': SoftmaxCrossEntropyConfig(['image', 0, 0],
                                          output_layer=['dense_0', 0, 0]).to_schema(),
        'optimizer': AdamConfig(learning_rate=0.01).to_schema(),
        'metrics': [
            AccuracyConfig(input_layer=['image', 0, 0],
                           output_layer=['dense_0', 0, 0]).to_schema(),
            PrecisionConfig(input_layer=['image', 0, 0],
                            output_layer=['dense_0', 0, 0]).to_schema(),
        ],
        'summaries': ['loss', 'gradients'],
        'clip_gradients': 0.5,
        'clip_embed_gradients': 0.,
        'name': 'model'
    }
    config = ClassifierConfig.from_dict(config_dict)
    config_to_dict = config.to_dict()
    self.assert_equal_models(config_to_dict, config_dict)
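The test exercises a serialize/deserialize round trip: a config built with from_dict should reproduce the original dict via to_dict. A library-free sketch of the same pattern using a dataclass (ToyConfig is illustrative, not a Polyaxon class):

from dataclasses import dataclass, asdict

@dataclass
class ToyConfig:
    learning_rate: float = 0.001
    name: str = 'optimizer'

    @classmethod
    def from_dict(cls, d):
        return cls(**d)

    def to_dict(self):
        return asdict(self)

d = {'learning_rate': 0.01, 'name': 'optimizer'}
assert ToyConfig.from_dict(d).to_dict() == d  # round trip preserves the dict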
Example #4
def test_generator_model_config(self):
    config_dict = {
        'bridge': NoOpBridgeConfig().to_schema(),
        'encoder': {
            'input_layers': ['image'],
            'output_layers': ['encoded'],
            'layers': [{'Dense': {'units': 1, 'name': 'encoded'}}]
        },
        'decoder': {
            'input_layers': ['image'],
            'output_layers': ['decoded'],
            'layers': [{'Dense': {'units': 1, 'name': 'decoded'}}]
        },
        'loss': MeanSquaredErrorConfig(input_layer=['image', 0, 0],
                                       output_layer=['decoded', 0, 0]).to_schema(),
        'optimizer': AdamConfig(learning_rate=0.01).to_schema(),
        'metrics': [],
        'summaries': ['loss', 'gradients'],
        'clip_gradients': 0.5,
        'clip_embed_gradients': 0.,
        'name': 'model'
    }
    config = GeneratorConfig.from_dict(config_dict)
    config_to_dict = config.to_dict()
    assert config_dict.pop('bridge') == config_to_dict.pop('bridge')
    assert_equal_graphs(config_dict.pop('encoder'),
                        config_to_dict.pop('encoder'))
    assert_equal_graphs(config_dict.pop('decoder'),
                        config_to_dict.pop('decoder'))
    assert_equal_dict(config_dict, config_to_dict)
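MeanSquaredErrorConfig here pairs the decoder output with the input, i.e. a reconstruction loss. For reference, mean squared error is simply the mean of squared element-wise differences; a NumPy sketch:

import numpy as np

def mean_squared_error(y_true, y_pred):
    # Average squared difference between targets and predictions.
    return np.mean((y_true - y_pred) ** 2)

print(mean_squared_error(np.array([1., 2.]), np.array([1.5, 2.5])))  # 0.25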
Example #5
def test_adam_config(self):
    config_dict = {
        'learning_rate': 0.001,
        'beta1': 0.9,
        'beta2': 0.999,
        'epsilon': 1e-10,
        'decay_type': "",
        'decay_rate': 0.,
        'decay_steps': 100,
        'start_decay_at': 0,
        'stop_decay_at': 1e10,
        'min_learning_rate': 1e-12,
        'staircase': False,
        'global_step': None,
        'use_locking': False,
        'name': 'optimizer'
    }
    config = AdamConfig.from_dict(config_dict)
    assert_equal_dict(config.to_dict(), config_dict)
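beta1, beta2, and epsilon are the standard Adam moment and numerical-stability hyperparameters. For orientation, a minimal NumPy sketch of one Adam update using the defaults from this dict (this is the textbook update rule, not Polyaxon's implementation):

import numpy as np

def adam_step(theta, grad, m, v, t, lr=0.001, beta1=0.9, beta2=0.999, eps=1e-10):
    # Exponential moving averages of the gradient and its square.
    m = beta1 * m + (1 - beta1) * grad
    v = beta2 * v + (1 - beta2) * grad ** 2
    # Bias-correct the zero-initialized moments, then update.
    m_hat = m / (1 - beta1 ** t)
    v_hat = v / (1 - beta2 ** t)
    theta = theta - lr * m_hat / (np.sqrt(v_hat) + eps)
    return theta, m, v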
Example #6
def test_regressor_model_config(self):
    config_dict = {
        'graph': {
            'input_layers': ['image'],
            'output_layers': ['dense_0'],
            'layers': [
                {'LSTM': {'units': 5}},
                {'Dense': {'units': 1, 'name': 'dense_0'}}
            ]
        },
        'loss': MeanSquaredErrorConfig(input_layer=['image', 0, 0],
                                       output_layer=['dense_0', 0, 0]).to_schema(),
        'optimizer': AdamConfig(learning_rate=0.01).to_schema(),
        'metrics': [],
        'summaries': ['loss', 'gradients'],
        'clip_gradients': 0.5,
        'clip_embed_gradients': 0.,
        'name': 'model'
    }
    config = RegressorConfig.from_dict(config_dict)
    config_to_dict = config.to_dict()
    self.assert_equal_models(config_to_dict, config_dict)
Example #7
    def __init__(self,
                 mode,
                 model_type,
                 graph_fn,
                 loss,
                 optimizer=None,
                 metrics=None,
                 summaries='all',
                 clip_gradients=0.5,
                 clip_embed_gradients=0.1,
                 name="Model"):

        # Check that the mode corresponds to the correct model type.
        if (mode in [Modes.GENERATE, Modes.ENCODE]
                and model_type != self.Types.GENERATOR):
            raise TypeError(
                "Current model type `{}` does not support passed mode `{}`.".format(
                    model_type, mode))

        super(BaseModel, self).__init__(mode, name or "Model",
                                        self.ModuleType.MODEL)
        self.loss = loss
        self.optimizer = optimizer or AdamConfig(learning_rate=0.001)
        self.metrics = metrics or []
        self.model_type = model_type
        self.summaries = summarizer.SummaryOptions.validate(summaries)
        assert model_type in self.Types.VALUES, "`model_type` provided is unsupported."
        self._clip_gradients = clip_gradients
        self._clip_embed_gradients = clip_embed_gradients
        self._grads_and_vars = None
        self._total_loss = None
        self._losses = None
        self._loss = None

        self._check_subgraph_fn(function=graph_fn, function_name='graph_fn')
        self._graph_fn = graph_fn
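clip_gradients=0.5 caps the gradient norm before each update. Assuming clipping by global norm in the style of tf.clip_by_global_norm (whether this model clips globally or per-tensor is not visible from this snippet), a NumPy sketch:

import numpy as np

def clip_by_global_norm(grads, clip_norm=0.5):
    # Scale every gradient by clip_norm / max(global_norm, clip_norm),
    # so the combined norm never exceeds clip_norm.
    global_norm = np.sqrt(sum(np.sum(g ** 2) for g in grads))
    scale = clip_norm / max(global_norm, clip_norm)
    return [g * scale for g in grads]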