Code example #1
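This test builds a BaseModelConfig from a plain dictionary and checks that converting it back with to_dict() reproduces the original spec. The config classes it uses are not imported in this snippet; the paths below are a best guess at the relevant polyaxon schema modules and may differ between versions:

# Assumed imports (not shown in the original snippet; adjust to your polyaxon version):
# from polyaxon_schemas.losses import SoftmaxCrossEntropyConfig
# from polyaxon_schemas.optimizers import AdamConfig
# from polyaxon_schemas.metrics import AccuracyConfig, PrecisionConfig
# from polyaxon_schemas.models import BaseModelConfig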
def test_base_model_config(self):
    # Build the full model spec as a plain dict: graph, loss, optimizer, metrics.
    config_dict = {
        'graph': {
            'input_layers': ['image'],
            'output_layers': ['dense_0'],
            'layers': [
                {
                    'Conv2D': {
                        'filters': 64,
                        'strides': [1, 1],
                        'kernel_size': [2, 2],
                        'activation': 'relu',
                        'name': 'convolution_1',
                    }
                },
                {'Dense': {'units': 17, 'name': 'dense_0'}}
            ]
        },
        'loss': SoftmaxCrossEntropyConfig(input_layer=['image', 0, 0],
                                          output_layer=['dense_0', 0, 0]).to_schema(),
        'optimizer': AdamConfig(learning_rate=0.01).to_schema(),
        'metrics': [
            AccuracyConfig(input_layer=['image', 0, 0],
                           output_layer=['dense_0', 0, 0]).to_schema(),
            PrecisionConfig(input_layer=['image', 0, 0],
                            output_layer=['dense_0', 0, 0]).to_schema(),
        ],
        'summaries': ['loss', 'gradients'],
        'clip_gradients': 0.5,
        'clip_embed_gradients': 0.,
        'name': 'model'}
    # Round trip: dict -> BaseModelConfig -> dict should preserve the spec.
    config = BaseModelConfig.from_dict(config_dict)
    config_to_dict = config.to_dict()
    self.assert_equal_models(config_to_dict, config_dict)
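Note that assert_equal_models is a helper defined on the test case class (not shown in this snippet); the intent is that the dict produced by to_dict() matches the dict the config was built from.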
Code example #2
File: vgg19.py  Project: saadmahboob/polyaxon
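This model_fn follows the tf.estimator model-function signature (features, labels, params, mode, config). It assumes `import polyaxon as plx` at the top of vgg19.py and a graph_fn, defined elsewhere in the file, that presumably builds the VGG-19 network graph.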
def model_fn(features, labels, params, mode, config):
    # Classifier with sigmoid cross-entropy loss, Adam with exponential
    # learning-rate decay, accuracy/precision metrics, and one-hot encoded
    # labels over 10 classes.
    model = plx.models.Classifier(
        mode=mode,
        graph_fn=graph_fn,
        loss=SigmoidCrossEntropyConfig(),
        optimizer=AdamConfig(
            learning_rate=0.007, decay_type='exponential_decay', decay_rate=0.1),
        metrics=[
            AccuracyConfig(),
            PrecisionConfig()
        ],
        summaries='all',
        one_hot_encode=True,
        n_classes=10)
    # Apply the model to the current batch for the given mode.
    return model(features=features, labels=labels, params=params, config=config)
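For context, a model_fn with this signature is normally handed to an estimator. A minimal usage sketch, assuming TensorFlow 1.x's tf.estimator API and a hypothetical stub input function (neither appears in the original file):

import tensorflow as tf

def train_input_fn():
    # Hypothetical stub; replace with a real tf.data input pipeline.
    # 224x224x3 is the usual VGG input size (an assumption here).
    images = tf.zeros([32, 224, 224, 3])
    labels = tf.zeros([32], dtype=tf.int32)
    return images, labels

estimator = tf.estimator.Estimator(model_fn=model_fn, model_dir='/tmp/vgg19')
estimator.train(input_fn=train_input_fn, max_steps=1000)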