Example No. 1
    def test_learning_rate_value(self):
        lr = -1.0
        with pytest.raises(ValueError):
            dr.exponential_decay_lr(lr, decay_rate, total_step, step_per_epoch, decay_epoch)

        with pytest.raises(ValueError):
            dr.polynomial_decay_lr(lr, end_learning_rate, total_step, step_per_epoch, decay_epoch, power)
Example No. 2
def test_exponential_decay():
    lr1 = dr.exponential_decay_lr(learning_rate, decay_rate, total_step,
                                  step_per_epoch, decay_epoch)
    assert len(lr1) == total_step

    lr2 = dr.exponential_decay_lr(learning_rate, decay_rate, total_step,
                                  step_per_epoch, decay_epoch, True)
    assert len(lr2) == total_step
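These test fragments rely on module-level constants (learning_rate, decay_rate, total_step, step_per_epoch, decay_epoch, and so on) defined elsewhere in the test module. A minimal self-contained sketch, with placeholder values assumed purely for illustration, looks like this:

# Self-contained sketch (not part of the original tests); the values below
# are illustrative placeholders for the module-level constants.
from mindspore.nn import dynamic_lr as dr

learning_rate = 0.1     # initial learning rate
decay_rate = 0.9        # decay factor applied every decay_epoch epochs
total_step = 6          # total number of training steps
step_per_epoch = 2      # steps per epoch
decay_epoch = 1         # epochs between decays

lr_list = dr.exponential_decay_lr(learning_rate, decay_rate, total_step,
                                  step_per_epoch, decay_epoch)
print(len(lr_list))     # one learning-rate value per step, i.e. 6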
Example No. 3
    def test_total_step1(self):
        total_step1 = 2.0
        with pytest.raises(ValueError):
            dr.exponential_decay_lr(learning_rate, decay_rate, total_step1, step_per_epoch, decay_epoch)

        with pytest.raises(ValueError):
            dr.cosine_decay_lr(min_lr, max_lr, total_step1, step_per_epoch, decay_epoch)

        with pytest.raises(ValueError):
            dr.polynomial_decay_lr(learning_rate, end_learning_rate, total_step1, step_per_epoch, decay_epoch, power)
Example No. 4
    def test_decay_epoch1(self):
        decay_epoch1 = 'm'
        with pytest.raises(TypeError):
            dr.exponential_decay_lr(learning_rate, decay_rate, total_step,
                                    step_per_epoch, decay_epoch1)

        with pytest.raises(TypeError):
            dr.cosine_decay_lr(min_lr, max_lr, total_step, step_per_epoch,
                               decay_epoch1)

        with pytest.raises(TypeError):
            dr.polynomial_decay_lr(learning_rate, end_learning_rate,
                                   total_step, step_per_epoch, decay_epoch1,
                                   power)
Example No. 5
    parser.add_argument('--device_id', type=int, default=0, help='device id of GPU. (Default: 0)')
    args = parser.parse_args()

    if args.device_target == "CPU":
        args.dataset_sink_mode = False

    context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target, device_id=args.device_id)

    network = Inceptionv3(cfg.num_classes)
    net_loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction="mean",
                                                smooth_factor=cfg.label_smoothing_eps)
    ds_train = create_dataset(args.data_path, cfg.batch_size, cfg.epoch_size)
    step_per_epoch = ds_train.get_dataset_size()
    total_step = step_per_epoch * cfg.epoch_size
    lr = exponential_decay_lr(learning_rate=cfg.lr_init, decay_rate=cfg.lr_decay_rate,
                              total_step=total_step, step_per_epoch=step_per_epoch,
                              decay_epoch=cfg.lr_decay_epoch)
    net_opt = nn.RMSProp(network.trainable_params(), learning_rate=lr, decay=cfg.rmsprop_decay,
                         momentum=cfg.rmsprop_momentum, epsilon=cfg.rmsprop_epsilon)
    time_cb = TimeMonitor(data_size=ds_train.get_dataset_size())
    config_ck = CheckpointConfig(save_checkpoint_steps=cfg.save_checkpoint_steps,
                                 keep_checkpoint_max=cfg.keep_checkpoint_max)
    ckpoint_cb = ModelCheckpoint(prefix="checkpoint_inceptionv3", config=config_ck)
    # summary_cb = SummaryCollector(args.summary_path,
    #                             collect_freq=1,
    #                             keep_default_action=False,
    #                             collect_specified_data={'collect_graph': True})
    model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()})

    print("============== Starting Training ==============")
Example No. 6
    def test_decay_rate_value(self):
        rate = -1.0
        with pytest.raises(ValueError):
            dr.exponential_decay_lr(learning_rate, rate, total_step,
                                    step_per_epoch, decay_epoch)
Example No. 7
    def test_decay_rate_type(self):
        rate = 'a'
        with pytest.raises(TypeError):
            dr.exponential_decay_lr(learning_rate, rate, total_step,
                                    step_per_epoch, decay_epoch)
Example No. 8
    def test_is_stair(self):
        is_stair = 1
        with pytest.raises(TypeError):
            dr.exponential_decay_lr(learning_rate, decay_rate, total_step,
                                    step_per_epoch, decay_epoch, is_stair)
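The failing call above passes an int where a bool is expected. For contrast, a minimal sketch of the accepted usage, assuming the same module-level constants as the tests, would be:

# Sketch of the valid form: is_stair must be a Python bool.
lr_smooth = dr.exponential_decay_lr(learning_rate, decay_rate, total_step,
                                    step_per_epoch, decay_epoch, is_stair=False)
lr_staircase = dr.exponential_decay_lr(learning_rate, decay_rate, total_step,
                                       step_per_epoch, decay_epoch, is_stair=True)
# With is_stair=True the rate is decayed once every decay_epoch epochs (staircase);
# with is_stair=False the decay exponent changes fractionally each epoch.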