Example #1
    def test_learning_rate_without_decay_or_warmups(self):
        params = base_configs.LearningRateConfig(name='exponential',
                                                 initial_lr=0.01,
                                                 decay_rate=0.01,
                                                 decay_epochs=None,
                                                 warmup_epochs=None,
                                                 scale_by_batch_size=0.01,
                                                 examples_per_epoch=1,
                                                 boundaries=[0],
                                                 multipliers=[0, 1])
        batch_size = 1
        train_steps = 1

        lr = optimizer_factory.build_learning_rate(params=params,
                                                   batch_size=batch_size,
                                                   train_steps=train_steps)
        self.assertTrue(
            issubclass(type(lr),
                       tf.keras.optimizers.schedules.LearningRateSchedule))
Example #2
    def test_learning_rate_with_decay_and_warmup(self, lr_decay_type):
        """Basic smoke test for syntax."""
        params = base_configs.LearningRateConfig(name=lr_decay_type,
                                                 initial_lr=0.01,
                                                 decay_rate=0.01,
                                                 decay_epochs=1,
                                                 warmup_epochs=1,
                                                 scale_by_batch_size=0.01,
                                                 examples_per_epoch=1,
                                                 boundaries=[0],
                                                 multipliers=[0, 1])
        batch_size = 1
        train_steps = 1

        lr = optimizer_factory.build_learning_rate(params=params,
                                                   batch_size=batch_size,
                                                   train_steps=train_steps)
        self.assertTrue(
            issubclass(type(lr),
                       tf.keras.optimizers.schedules.LearningRateSchedule))
Example #3
@dataclasses.dataclass
class SqueezeNetModelConfig(base_configs.ModelConfig):
    """Configuration for the SqueezeNet model."""
    name: str = 'SqueezeNet'
    num_classes: int = 1000
    model_params: Mapping[str,
                          Any] = dataclasses.field(default_factory=lambda: {
                              'num_classes': 1000,
                              'batch_size': None,
                          })
    loss: base_configs.LossConfig = base_configs.LossConfig(
        name='sparse_categorical_crossentropy')
    optimizer: base_configs.OptimizerConfig = base_configs.OptimizerConfig(
        name='momentum',
        decay=0.9,
        epsilon=0.001,
        momentum=0.9,
        moving_average_decay=None)
    learning_rate: base_configs.LearningRateConfig = (
        base_configs.LearningRateConfig(name='piecewise_constant_with_warmup',
                                        examples_per_epoch=1281167,
                                        warmup_epochs=_LR_WARMUP_EPOCHS,
                                        boundaries=_LR_BOUNDARIES,
                                        multipliers=_LR_MULTIPLIERS))
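The constants _LR_WARMUP_EPOCHS, _LR_BOUNDARIES and _LR_MULTIPLIERS are module-level values defined elsewhere in the source file. As a minimal, hypothetical usage sketch (assuming the class is decorated with @dataclasses.dataclass as shown above), the config can be constructed with field overrides and read back as plain attributes:

# Hypothetical usage: override selected fields at construction time,
# keep the remaining defaults.
config = SqueezeNetModelConfig(num_classes=10)

print(config.name)                # 'SqueezeNet'
print(config.num_classes)         # 10
print(config.optimizer.name)      # 'momentum'
print(config.learning_rate.name)  # 'piecewise_constant_with_warmup'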