Example #1
0
    def test_batch_norm_class(self):
        """Smoke-test wiring a batch-norm classifier into the trainer.

        Builds minimal train/model configs from text protos, points the
        output paths at a throwaway directory, and runs a short
        (5-iteration) training loop.
        """
        train_text = """
        optimizer {
          gradient_descent {
            learning_rate {
              constant_learning_rate {
                learning_rate: 1.0
              }
            }
          }
        }
        max_iterations: 5
        """
        model_text = """
            path_drop_probabilities: [1.0, 1.0]
        """

        # Parse both text protos into fresh config messages.
        model_config = model_pb2.ModelConfig()
        text_format.Merge(model_text, model_config)

        train_config = train_pb2.TrainConfig()
        text_format.Merge(train_text, train_config)
        train_config.overwrite_checkpoints = True

        # Route logs and checkpoints into a temp location so repeated
        # runs don't collide with real training output.
        test_root_dir = '/tmp/avod_unit_test/'
        paths = model_config.paths_config
        paths.logdir = test_root_dir + 'logs/'
        paths.checkpoint_dir = test_root_dir

        classifier = FakeBatchNormClassifier(model_config)
        trainer.train(classifier, train_config)
Example #2
0
def get_model_config_from_file(config_path):
    """Load a model configuration from a text-proto file.

    The file contents are merged into a fresh ``model_pb2.ModelConfig``,
    so values from the file overlay the message's proto defaults.

    Args:
        config_path: Path to the text-format config file.

    Returns:
        A populated ``model_pb2.ModelConfig`` message.
    """
    config = model_pb2.ModelConfig()
    with open(config_path, 'r') as config_file:
        config_text = config_file.read()
    text_format.Merge(config_text, config)
    return config