Example #1
0
# Batch-norm momentum scheduler for the detector; presumably bn_lbmd maps
# epoch -> BN momentum (project-defined) — TODO confirm against BNMomentumScheduler.
bnm_scheduler = BNMomentumScheduler(detector, bn_lambda=bn_lbmd, last_epoch=-1)

def get_current_lr(epoch):
    """Return the learning rate for *epoch*.

    Starts from BASE_LEARNING_RATE and multiplies in every decay factor
    whose step boundary has already been reached.
    """
    rate = BASE_LEARNING_RATE
    for boundary, factor in zip(LR_DECAY_STEPS, LR_DECAY_RATES):
        if epoch >= boundary:
            rate *= factor
    return rate

def adjust_learning_rate(optimizer, epoch):
    """Write the scheduled learning rate for *epoch* into every param group."""
    new_lr = get_current_lr(epoch)
    for group in optimizer.param_groups:
        group['lr'] = new_lr

# TFBoard Visualizers
# One TfVisualizer (project-defined TensorBoard writer) per phase, both
# logging under LOG_DIR with a 'train'/'test' tag.
TRAIN_VISUALIZER = TfVisualizer(LOG_DIR, 'train')
TEST_VISUALIZER = TfVisualizer(LOG_DIR, 'test')

# Used for Pseudo box generation and AP calculation
# Evaluation settings: class-aware 3D NMS at IoU 0.25, per-class proposals,
# confidence threshold 0.05. Key order matches the original literal.
CONFIG_DICT = {
    'dataset_config': DATASET_CONFIG,
    'remove_empty_box': False,
    'use_3d_nms': True,
    'nms_iou': 0.25,
    'use_old_type_nms': False,
    'cls_nms': True,
    'per_class_proposal': True,
    'conf_thresh': 0.05,
}

# Console banner marking the end of the global configuration section.
print('************************** GLOBAL CONFIG END **************************')
# ------------------------------------------------------------------------- GLOBAL CONFIG END

# NOTE(review): this definition is truncated in this excerpt — the body of the
# for-loop below is missing (cut off by the page scrape). Do not use as-is.
def update_ema_variables(model, ema_model, alpha, global_step):
    # Use the true average until the exponential average is more correct
    alpha = min(1 - 1 / (global_step + 1), alpha)
    for ema_param, param in zip(ema_model.parameters(), model.parameters()):
Example #2
0
def get_current_lr(epoch):
    """Compute the stepwise-decayed learning rate for the given epoch."""
    current = BASE_LEARNING_RATE
    for boundary, factor in zip(LR_DECAY_STEPS, LR_DECAY_RATES):
        if epoch < boundary:
            continue
        current *= factor
    return current


def adjust_learning_rate(optimizer, epoch):
    """Apply the epoch's scheduled learning rate to all optimizer groups."""
    scheduled = get_current_lr(epoch)
    for g in optimizer.param_groups:
        g['lr'] = scheduled


# TFBoard Visualizers
# One TfVisualizer (project-defined TensorBoard writer) per phase; this
# variant is configured from FLAGS rather than an explicit log directory.
TRAIN_VISUALIZER = TfVisualizer(FLAGS, 'train')
TEST_VISUALIZER = TfVisualizer(FLAGS, 'test')

# Used for AP calculation
# Evaluation settings: class-aware 3D NMS at IoU 0.25, per-class proposals,
# confidence threshold 0.05. Keyword form keeps the original insertion order.
CONFIG_DICT = dict(
    remove_empty_box=False,
    use_3d_nms=True,
    nms_iou=0.25,
    use_old_type_nms=False,
    cls_nms=True,
    per_class_proposal=True,
    conf_thresh=0.05,
    dataset_config=DATASET_CONFIG,
)

# ------------------------------------------------------------------------- GLOBAL CONFIG END
Example #3
0
def get_current_lr(epoch):
    """Return BASE_LEARNING_RATE with every decay step reached by *epoch* applied."""
    lr_value = BASE_LEARNING_RATE
    idx = 0
    while idx < len(LR_DECAY_STEPS):
        if epoch >= LR_DECAY_STEPS[idx]:
            lr_value *= LR_DECAY_RATES[idx]
        idx += 1
    return lr_value


def adjust_learning_rate(optimizer, epoch):
    """Overwrite the "lr" entry of each optimizer param group with the scheduled rate."""
    target = get_current_lr(epoch)
    for entry in optimizer.param_groups:
        entry["lr"] = target


# TFBoard Visualizers
# One TfVisualizer (project-defined TensorBoard writer) per phase,
# configured from the parsed command-line FLAGS.
TRAIN_VISUALIZER = TfVisualizer(FLAGS, "train")
TEST_VISUALIZER = TfVisualizer(FLAGS, "test")

# Used for AP calculation
# Evaluation settings: class-aware 3D NMS at IoU 0.25, per-class proposals,
# confidence threshold 0.05. Keyword form preserves the original key order.
CONFIG_DICT = dict(
    remove_empty_box=False,
    use_3d_nms=True,
    nms_iou=0.25,
    use_old_type_nms=False,
    cls_nms=True,
    per_class_proposal=True,
    conf_thresh=0.05,
    dataset_config=DATASET_CONFIG,
)

# ------------------------------------------------------------------------- GLOBAL CONFIG END