All examples below assume project-local helpers (get_model, get_loss,
get_optimizer, get_scheduler, the get_*_dataloader factories, get_transform,
get_visualizer, count_parameters, train, utils.checkpoint), a global `device`,
and SummaryWriter (from torch.utils.tensorboard or tensorboardX).

Example no. 1
def run(config):
    train_dir = config.train.dir

    # This variant trains the student network alone, so the model type is
    # fixed to 'student' throughout.
    student_model = get_model(config, 'student').to(device)
    criterion = get_loss(config)
    trainable_params = filter(lambda p: p.requires_grad,
                              student_model.parameters())
    optimizer = get_optimizer(config, trainable_params)
    checkpoint = utils.checkpoint.get_initial_checkpoint(config,
                                                         model_type='student')
    if checkpoint is not None:
        last_epoch, step = utils.checkpoint.load_checkpoint(
            student_model, optimizer, checkpoint, model_type='student')
    else:
        # No checkpoint yet: last_epoch=-1 starts training at epoch 0 and
        # matches PyTorch's LR-scheduler convention.
        last_epoch, step = -1, -1
    print('student model from checkpoint: {} last epoch:{}'.format(
        checkpoint, last_epoch))

    scheduler = get_scheduler(config, optimizer, last_epoch)

    print(config.data)
    dataloaders = {
        'train': get_train_dataloader(config),
        'val': get_valid_dataloader(config),
        'test': get_test_dataloader(config)
    }
    writer = SummaryWriter(config.train['student_dir'])
    visualizer = get_visualizer(config)
    train(config, student_model, dataloaders, criterion, optimizer, scheduler,
          writer, visualizer, last_epoch + 1)
Example no. 2
def run(config):
    train_dir = config.train.dir
    # `model_type` ('teacher' or 'student') is assumed to be defined at module
    # level in the original repository.
    model = get_model(config, model_type).to(device)
    print('The number of parameters : %d' % count_parameters(model))
    criterion = get_loss(config)
    optimizer = get_optimizer(config, model)

    checkpoint = utils.checkpoint.get_initial_checkpoint(config,
                                                         model_type=model_type)
    if checkpoint is not None:
        last_epoch, step = utils.checkpoint.load_checkpoint(
            model, optimizer, checkpoint, model_type=model_type)
    else:
        last_epoch, step = -1, -1

    print('from checkpoint: {} last epoch:{}'.format(checkpoint, last_epoch))
    scheduler = get_scheduler(config, optimizer, last_epoch)

    print(config.data)
    dataloaders = {
        'train': get_train_dataloader(config),
        'val': get_valid_dataloader(config),
        'test': get_test_dataloader(config)
    }

    writer = SummaryWriter(config.train[model_type + '_dir'])
    visualizer = get_visualizer(config)
    train(config, model, dataloaders, criterion, optimizer, scheduler, writer,
          visualizer, last_epoch + 1)
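
The resume logic in the examples above goes through utils.checkpoint, which is
project code and not shown. A plausible sketch of load_checkpoint, assuming
checkpoints store model and optimizer state plus epoch/step counters (all key
names here are hypothetical):

import torch

def load_checkpoint(model, optimizer, checkpoint_path, model_type=None):
    # Restore weights, and optimizer state only when an optimizer is given
    # (Example no. 4 below passes None for the frozen teacher).
    state = torch.load(checkpoint_path, map_location='cpu')
    model.load_state_dict(state['state_dict'])
    if optimizer is not None and 'optimizer' in state:
        optimizer.load_state_dict(state['optimizer'])
    return state.get('epoch', -1), state.get('step', -1)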
Example no. 3
def run(config):
    teacher_model = get_model(config, 'teacher').to(device)
    criterion = get_loss(config)

    # for teacher
    trainable_params = filter(lambda p: p.requires_grad,
                              teacher_model.parameters())
    # Pass only the trainable parameters so frozen weights stay out of the
    # optimizer.
    optimizer_t = get_optimizer(config, trainable_params)
    checkpoint_t = utils.checkpoint.get_initial_checkpoint(
        config, model_type='teacher')
    if checkpoint_t is not None:
        last_epoch_t, step_t = utils.checkpoint.load_checkpoint(
            teacher_model, optimizer_t, checkpoint_t, model_type='teacher')
    else:
        last_epoch_t, step_t = -1, -1
    print('teacher model from checkpoint: {} last epoch:{}'.format(
        checkpoint_t, last_epoch_t))

    scheduler_t = get_scheduler(config, optimizer_t, last_epoch_t)

    print(config.data)
    dataloaders = {
        'train': get_train_dataloader(config),
        'val': get_valid_dataloader(config),
        'test': get_test_dataloader(config)
    }
    writer = SummaryWriter(config.train['teacher_dir'])
    visualizer = get_visualizer(config)
    train(config, teacher_model, dataloaders, criterion, optimizer_t,
          scheduler_t, writer, visualizer, last_epoch_t + 1)
Example no. 4
def run(config):
    teacher_model = get_model(config, 'teacher').to(device)
    student_model = get_model(config, 'student').to(device)
    print('The number of parameters : %d' % count_parameters(student_model))
    criterion = get_loss(config)

    # for teacher: loaded for inference only, so no optimizer is created and
    # only the weights are restored from the checkpoint.
    optimizer_t = None
    checkpoint_t = utils.checkpoint.get_initial_checkpoint(
        config, model_type='teacher')
    if checkpoint_t is not None:
        last_epoch_t, step_t = utils.checkpoint.load_checkpoint(
            teacher_model, optimizer_t, checkpoint_t, model_type='teacher')
    else:
        last_epoch_t, step_t = -1, -1
    print('teacher model from checkpoint: {} last epoch:{}'.format(
        checkpoint_t, last_epoch_t))

    # for student
    optimizer_s = get_optimizer(config, student_model)
    checkpoint_s = utils.checkpoint.get_initial_checkpoint(
        config, model_type='student')
    if checkpoint_s is not None:
        last_epoch_s, step_s = utils.checkpoint.load_checkpoint(
            student_model, optimizer_s, checkpoint_s, model_type='student')
    else:
        last_epoch_s, step_s = -1, -1
    print('student model from checkpoint: {} last epoch:{}'.format(
        checkpoint_s, last_epoch_s))

    scheduler_s = get_scheduler(config, optimizer_s, last_epoch_s)

    print(config.data)
    dataloaders = {
        'train': get_train_dataloader(config, get_transform(config)),
        'val': get_valid_dataloader(config),
        # 'test': get_test_dataloader(config),
    }
    writer = SummaryWriter(config.train['student_dir'])
    visualizer = get_visualizer(config)
    result = train(config, student_model, teacher_model, dataloaders,
                   criterion, optimizer_s, scheduler_s, writer, visualizer,
                   last_epoch_s + 1)

    print('best psnr : %.3f, best epoch: %d' % (result['best_psnr'],
                                                result['best_epoch']))
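
In Example no. 4 train() receives both networks, so the student is presumably
supervised by the frozen teacher as well as by the ground truth; train() itself
is not shown. A minimal sketch of one such distillation step for an
image-restoration task (the loss weight `alpha` and the L1 choice are
assumptions, not taken from this code):

import torch
import torch.nn.functional as F

def distillation_step(student_model, teacher_model, batch, optimizer, alpha=0.5):
    inputs, targets = batch
    with torch.no_grad():                       # teacher stays frozen
        teacher_out = teacher_model(inputs)
    student_out = student_model(inputs)
    task_loss = F.l1_loss(student_out, targets)         # fit the ground truth
    distill_loss = F.l1_loss(student_out, teacher_out)  # imitate the teacher
    loss = task_loss + alpha * distill_loss
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    return loss.item()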