Example #1
from neon.optimizers import StepSchedule
from utils import allclose_with_out  # neon test helper (assumed import path, as in neon's tests/)


def test_step_schedule(backend_default):
    """
    Test the StepSchedule class
    """
    step_config = [1, 4, 5]
    change = [0.1, 0.3, 0.4]
    sch = StepSchedule(step_config=step_config, change=change)

    # expected lr per epoch: base 1.0, then 0.1 from epoch 1, 0.3 from epoch 4, 0.4 from epoch 5
    target_lr = [1.0, 0.1, 0.1, 0.1, 0.3, 0.4, 0.4, 0.4, 0.4]

    for e, lr in enumerate(target_lr):
        assert allclose_with_out(lr, sch.get_learning_rate(learning_rate=1.0, epoch=e))
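For reference, the target_lr list encodes StepSchedule's contract: the rate starts at the base learning_rate and is reset to change[i] once the epoch reaches step_config[i]. A minimal sketch of that logic (an illustrative re-implementation for clarity, not neon's actual code):

def step_lr(base_lr, epoch, step_config, change):
    # start from the base rate and apply every step whose epoch has been reached
    lr = base_lr
    for step_epoch, new_lr in zip(step_config, change):
        if epoch >= step_epoch:
            lr = new_lr
    return lr

# Reproduces target_lr from the test above:
# [step_lr(1.0, e, [1, 4, 5], [0.1, 0.3, 0.4]) for e in range(9)]
# -> [1.0, 0.1, 0.1, 0.1, 0.3, 0.4, 0.4, 0.4, 0.4]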
Example #2
import numpy as np

from neon.optimizers import StepSchedule


def test_step_schedule(backend_default):
    """
    Test the StepSchedule class
    """
    step_config = [1, 4, 5]
    change = [0.1, 0.3, 0.4]
    sch = StepSchedule(step_config=step_config, change=change)

    target_lr = [1.0, 0.1, 0.1, 0.1, 0.3, 0.4, 0.4, 0.4, 0.4]

    for e, lr in enumerate(target_lr):
        assert np.allclose(lr, sch.get_learning_rate(learning_rate=1.0, epoch=e))
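Example #2 is the same test as Example #1; the only difference is that it checks the result with NumPy's np.allclose directly rather than the allclose_with_out helper.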
Example #3
File: train.py Project: leo-lp/neon-1
from neon.layers import GeneralizedCostMask, Multicost
from neon.optimizers import GradientDescentMomentum, MultiOptimizer, StepSchedule
from neon.transforms import CrossEntropyMulti, SmoothL1Loss

# roi_w and weights are cost masks defined earlier in the full train.py (not shown)
frcn_tree_cost = Multicost(costs=[
    GeneralizedCostMask(costfunc=CrossEntropyMulti(), weights=roi_w),
    GeneralizedCostMask(costfunc=SmoothL1Loss(), weights=roi_w)
],
                           weights=[1, 1])

cost = Multicost(costs=[
    GeneralizedCostMask(costfunc=CrossEntropyMulti(), weights=weights),
    GeneralizedCostMask(costfunc=SmoothL1Loss(sigma=3.0), weights=weights),
    frcn_tree_cost,
],
                 weights=[1, 1, 1])

# setup optimizer
# drop each learning rate by a factor of 10 at epoch 10
schedule_w = StepSchedule(step_config=[10], change=[0.001 / 10])
schedule_b = StepSchedule(step_config=[10], change=[0.002 / 10])

opt_w = GradientDescentMomentum(0.001, 0.9, wdecay=0.0005, schedule=schedule_w)
opt_b = GradientDescentMomentum(0.002, 0.9, wdecay=0.0005, schedule=schedule_b)
opt_skip = GradientDescentMomentum(0.0, 0.0)

# route optimizers by layer class/name; 'default' is the fallback,
# and the zero-rate opt_skip freezes the 'skip' and 'skip_bias' layers
optimizer = MultiOptimizer({
    'default': opt_w,
    'Bias': opt_b,
    'skip': opt_skip,
    'skip_bias': opt_skip
})

# if training a new model, seed the image model conv layers with pre-trained weights
# otherwise, just load the model file
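As a quick sanity check of the schedules above, get_learning_rate can be called directly, just as the tests in Examples #1 and #2 do (a sketch under that assumption, not part of the original train.py):

from neon.optimizers import StepSchedule

schedule_w = StepSchedule(step_config=[10], change=[0.001 / 10])

for epoch in (0, 9, 10, 11):
    # stays at the base 0.001 through epoch 9, then drops to 0.0001
    print(epoch, schedule_w.get_learning_rate(learning_rate=0.001, epoch=epoch))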