Exemplo n.º 1
0
    def test_milestone2(self):
        """Invalid milestone lists are rejected by piecewise_constant_lr.

        A non-increasing milestone sequence raises ValueError; a list
        with non-int entries raises TypeError.
        """
        decreasing = [20, 10, 1]
        with pytest.raises(ValueError):
            dr.piecewise_constant_lr(decreasing, learning_rates)

        non_integer = [1.0, 2.0, True]
        with pytest.raises(TypeError):
            dr.piecewise_constant_lr(non_integer, learning_rates)
Exemplo n.º 2
0
def train():
    """Train the EAST (VGG backbone) text detector on ICDAR data.

    Configures MindSpore graph mode on Ascend, downloads and builds the
    ICDAR training dataset from the local cache, wraps the network with
    its loss/training cells, and launches training with time and loss
    monitoring callbacks.
    """
    context.set_context(
        mode=context.GRAPH_MODE,
        device_target="Ascend",
    )

    epoch = 600

    # Fetch the raw images / ground truth into the local cache first.
    my_dataset.download_dataset()
    train_img_path = os.path.abspath('/cache/train_img')
    train_gt_path = os.path.abspath('/cache/train_gt')
    dataset = datasetV2.create_icdar_train_dataset(train_img_path,
                                                   train_gt_path,
                                                   batch_size=14,
                                                   repeat_num=1,
                                                   is_training=True,
                                                   num_parallel_workers=24)
    dataset_size = dataset.get_dataset_size()

    print("Create dataset done!, dataset_size: ", dataset_size)

    net = EAST_VGG.EAST()

    # Step-wise constant schedule: 1e-3 for the first 100 steps,
    # then 1e-4 through step 300.
    milestone = [100, 300]
    learning_rates = [1e-3, 1e-4]
    lr = piecewise_constant_lr(milestone, learning_rates)
    # Optimize only trainable parameters.
    opt = nn.Adam(filter(lambda x: x.requires_grad, net.get_parameters()),
                  learning_rate=lr)
    net = my_loss.EASTWithLossCell(net)
    net = my_loss.TrainingWrapper(net, opt)
    net.set_train(True)

    callback = [TimeMonitor(data_size=dataset_size),
                LossMonitor()]

    model = Model(net)
    dataset_sink_mode = False
    print("start training")  # fixed typo: was "start trainig"
    model.train(epoch,
                dataset,
                callbacks=callback,
                dataset_sink_mode=dataset_sink_mode)
Exemplo n.º 3
0
 def test_learning_rates2(self):
     """Integer learning-rate entries are rejected with TypeError."""
     int_rates = [1, 2, 1]
     with pytest.raises(TypeError):
         dr.piecewise_constant_lr(milestone, int_rates)
Exemplo n.º 4
0
 def test_milestone1(self):
     """A scalar milestone (not a list/tuple) raises TypeError."""
     scalar_milestone = 1
     with pytest.raises(TypeError):
         dr.piecewise_constant_lr(scalar_milestone, learning_rates)
Exemplo n.º 5
0
def test_learning_rate():
    """The generated schedule has one entry per step up to the last milestone."""
    schedule = dr.piecewise_constant_lr(milestone, learning_rates)
    assert len(schedule) == milestone[-1]
Exemplo n.º 6
0
    # Build the training dataset (P-K sampling per the config) — the
    # enclosing function's signature is outside this view.
    dataset = create_dataset("/home/dingfeifei/datasets/faces_webface_112x112_raw_image", \
        p=config.p, k=config.k)

    step_size = dataset.get_dataset_size()

    # LR schedule: linear warm-up for `lr_warmup_epochs`, then a
    # piecewise-constant schedule decayed by `lr_decay_factor` at each
    # configured epoch boundary (converted from epochs to steps).
    base_lr = config.learning_rate
    warm_up_epochs = config.lr_warmup_epochs
    lr_decay_epochs = config.lr_decay_epochs
    lr_decay_factor = config.lr_decay_factor
    lr_decay_steps = []
    lr_decay = []
    for i, v in enumerate(lr_decay_epochs):
        lr_decay_steps.append(v * step_size)
        lr_decay.append(base_lr * lr_decay_factor**i)
    lr_1 = warmup_lr(base_lr, step_size * warm_up_epochs, step_size,
                     warm_up_epochs)
    lr_2 = piecewise_constant_lr(lr_decay_steps, lr_decay)
    lr = lr_1 + lr_2

    train_net = TrainStepWrap(loss_net, lr, config.momentum)
    test_net = TestStepWrap(net)

    # Load pretrained backbone weights. Use a context manager so the
    # file handle is closed (it was previously left open/leaked).
    # NOTE(review): pickle.load is unsafe on untrusted input; acceptable
    # here only because the checkpoint is a local project artifact.
    with open("checkpoints/pretrained_resnet50.pkl", "rb") as f:
        param_dict = pickle.load(f)
    load_param_into_net(net=train_net, parameter_dict=param_dict)

    model = Model(train_net,
                  eval_network=test_net,
                  metrics={"Accuracy": Accuracy()})

    loss_cb = LossMonitor()
    cb = [loss_cb]
Exemplo n.º 7
0
 def test_learning_rates1(self):
     """A bare boolean learning rate is rejected with ValueError."""
     bool_lr = True
     with pytest.raises(ValueError):
         dr.piecewise_constant_lr(milestone, bool_lr)