Example #1
def get_config(args):
    config = get_cfg_defaults()
    if args.configs:
        yml_file = args.configs
        config.merge_from_file(yml_file)
    config.freeze()
    return config
Example #2
def get_config(args):
    config = get_cfg_defaults()
    if args.configs:
        yml_file = args.configs
        config.merge_from_file(yml_file)
    config.merge_from_list(['dist_local_rank', args.local_rank])
    config.freeze()
    return config
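Note: merge_from_list here follows the yacs convention of a flat list of alternating key/value pairs, so ['dist_local_rank', args.local_rank] overrides the single dist_local_rank key with the process's local rank.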
Example #3
def get_config(args):
    config = get_cfg_defaults()
    if args.configs:
        yml_file = args.configs
        config.merge_from_file(yml_file)
    if args.csv is not None:
        config.merge_from_list(['test.dataset', args.csv])
    config.freeze()
    return config
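Examples #1-#3 all read attributes off an args namespace that is built elsewhere. Below is a minimal sketch of how it might be produced with argparse; the flag names (--configs, --local_rank, --csv) are assumptions inferred from the attribute accesses above, not the original CLI.

import argparse

def parse_args():
    parser = argparse.ArgumentParser()
    # Assumed flags; names inferred from args.configs / args.local_rank / args.csv.
    parser.add_argument('--configs', type=str, default=None,
                        help='path to a YAML file merged over the defaults')
    parser.add_argument('--local_rank', type=int, default=0,
                        help='process rank for distributed training')
    parser.add_argument('--csv', type=str, default=None,
                        help='optional override for test.dataset')
    return parser.parse_args()

args = parse_args()
config = get_config(args)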
Example #4
def get_config(configs_file):
    config = get_cfg_defaults()
    config.merge_from_file(configs_file)
    config.freeze()
    return config
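All four variants depend on a get_cfg_defaults() helper that is not shown. The merge_from_file / merge_from_list / freeze calls match the yacs CfgNode API, so here is a minimal sketch under that assumption; the default keys and values are guesses inferred from the overrides used in the examples.

from yacs.config import CfgNode as CN

_C = CN()
_C.dist_local_rank = 0    # overridden from the command line in Example #2
_C.test = CN()
_C.test.dataset = ''      # overridden with args.csv in Example #3
_C.train = CN()
_C.train.epoches = 90     # read by the scheduler script in Example #5 (placeholder value)

def get_cfg_defaults():
    # Return a clone so each caller can merge and freeze its own copy
    # without mutating the module-level defaults.
    return _C.clone()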
Example #5
            # Warmup finished: shift the step count past the warmup phase and
            # follow cosine decay from base_lr down to min_lr.
            step -= self.warmup_steps
            lr = cosine_scheduler(step, self.base_lr, self.min_lr, self.total_steps)

        return lr
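

# The cosine_scheduler helper called in the warmup branch above is not part of
# this fragment. Below is a minimal sketch of the standard cosine-annealing
# formula it presumably implements; the signature is inferred from the call
# site and should be treated as an assumption, not the original code.
import math  # would normally sit with the other imports at the top of the file


def cosine_scheduler(step, base_lr, min_lr, total_steps):
    # Anneal from base_lr down to min_lr over total_steps along a half cosine.
    progress = min(step / max(total_steps, 1), 1.0)
    return min_lr + 0.5 * (base_lr - min_lr) * (1.0 + math.cos(math.pi * progress))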


def get_scheduler(config, optimizer, steps_per_epoch):
    # MultiScheduler must be callable: LambdaLR invokes it with the current
    # step index and scales the optimizer's initial lr by the returned value.
    lr_scheduler = MultiScheduler(config, steps_per_epoch)
    return torch.optim.lr_scheduler.LambdaLR(optimizer, lr_scheduler)


if __name__ == '__main__':
    from configs.config import get_cfg_defaults
    import matplotlib.pyplot as plt

    config = get_cfg_defaults()
    steps_per_epoches = 200
    total_step = config.train.epoches * steps_per_epoches

    model = torch.nn.Conv2d(3, 64, 3)  # dummy module so the optimizer has parameters
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-3, amsgrad=True)

    # lr = MultiScheduler(config, steps_per_epoches)
    # lr_scheduler = torch.optim.lr_scheduler.LambdaLR(optimizer, lr)
    lr_c = CosineWarmupLr(config, optimizer, steps_per_epoches)

    a = []  # learning rate recorded at each step, presumably for plotting
    for j in range(total_step):
        optimizer.zero_grad()

        # print(j, optimizer.param_groups[0]['lr'])