# Example 1
def main():
    """Distill a pretrained DAN ResNet-50 teacher into a ResNet-34 student.

    Domain-adaptation setup: amazon is the source domain, webcam the target.
    Runs unlabeled knowledge distillation (``od_kd_without_label``) on the
    target loader and evaluates on the target test loader.
    """
    batch_size = 64
    test_batch_size = 64
    lr = 0.1
    momentum = 0.9
    epochs = 100
    epoch_step = 30          # StepLR decay interval (epochs)
    weight_decay = 1e-4
    # NOTE(review): `save_dir` is not defined in this function — presumably a
    # module-level constant; confirm it exists before running.
    teacher_pretrained_path = "{}/dan_resnet50_amazon_2_webcam.pth".format(save_dir)
    student_pretrained = False
    device = torch.device("cuda")

    webcam = os.path.expanduser("~/datasets/webcam/images")
    amazon = os.path.expanduser("~/datasets/amazon/images")
    dslr = os.path.expanduser("~/datasets/dslr/images")  # unused here; kept for parity with sibling scripts

    # Source loader is built but not passed to od_kd_without_label below.
    train_loader_source = DA_datasets.office_loader(amazon, batch_size, 0)
    train_loader_target = DA_datasets.office_loader(webcam, batch_size, 0)
    testloader_target = DA_datasets.office_test_loader(webcam, test_batch_size, 0)

    logger = LoggerForSacred(VisdomLogger(port=10999))

    teacher_model = DAN_model.DANNet_ResNet(ResNet.resnet50, True)
    student_model = DAN_model.DANNet_ResNet(ResNet.resnet34, student_pretrained)

    if teacher_pretrained_path != "":
        teacher_model.load_state_dict(torch.load(teacher_pretrained_path))

    multi_gpu = torch.cuda.device_count() > 1
    if multi_gpu:
        teacher_model = torch.nn.DataParallel(teacher_model).to(device)
        student_model = torch.nn.DataParallel(student_model).to(device)

    distiller_model = od_distiller.Distiller_DAN(teacher_model, student_model)

    if multi_gpu:
        distiller_model = torch.nn.DataParallel(distiller_model).to(device)

    # DataParallel hides wrapped attributes behind `.module`; resolve the
    # Connectors once instead of duplicating the whole SGD construction.
    connectors = (distiller_model.module if multi_gpu else distiller_model).Connectors
    optimizer = torch.optim.SGD(
        list(student_model.parameters()) + list(connectors.parameters()),
        lr, momentum=momentum, weight_decay=weight_decay, nesterov=True)

    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, epoch_step)

    od_kd_without_label(epochs, teacher_model, student_model, distiller_model, optimizer, train_loader_target,
                        testloader_target, device, logger=logger, scheduler=scheduler)
# Example 2
    return model_dan, optimizer, best_acc


if __name__ == "__main__":
    # Train a DAN/GRL VGG-16 model: webcam is the source domain, amazon the
    # target; evaluation runs on the amazon test loader.
    batch_size = 32
    test_batch_size = 32

    webcam = os.path.expanduser("~/datasets/webcam/images")
    amazon = os.path.expanduser("~/datasets/amazon/images")

    epochs = 200
    lr = 0.01
    device = torch.device("cuda")

    train_loader_source = DA_datasets.office_loader(webcam, batch_size, 0)
    train_loader_target = DA_datasets.office_loader(amazon, batch_size, 0)
    testloader_1_target = DA_datasets.office_test_loader(amazon, test_batch_size, 0)

    logger = LoggerForSacred(VisdomLogger(port=9000))

    model_dan = DAN_model.DANNetVGG16(models.vgg16, True).to(device)

    optimizer = torch.optim.SGD(model_dan.parameters(), momentum=0.9, lr=lr, weight_decay=5e-4)
    dann_grl_train(epochs, lr, model_dan, train_loader_source, device, train_loader_target, testloader_1_target, optimizer, logger=logger,
                   logger_id="", scheduler=None, is_debug=False)
# Example 3
def main():
    """Run alternating GRL domain adaptation + Hinton knowledge distillation.

    Teacher: DAN ResNet-50 (pretrained). Student: DAN ResNet-34.
    Source domain: amazon; target domain: webcam (also used for testing).
    """
    batch_size = 32
    test_batch_size = 32

    webcam = os.path.expanduser("~/datasets/webcam/images")
    amazon = os.path.expanduser("~/datasets/amazon/images")
    is_debug = False

    epochs = 400
    init_lr_da = 0.001       # initial LR for the domain-adaptation optimizer
    init_lr_kd = 0.001       # initial LR for the distillation optimizer
    momentum = 0.9
    weight_decay = 5e-4
    device = torch.device("cuda")
    T = 20                   # distillation temperature
    alpha = 0.3              # KD loss weight
    init_beta = 0.1
    end_beta = 0.9

    student_pretrained = True

    # Build each model once; wrap in DataParallel only when several GPUs exist.
    teacher_model = DAN_model.DANNet_ResNet(ResNet.resnet50, True)
    student_model = DAN_model.DANNet_ResNet(ResNet.resnet34, student_pretrained)
    if torch.cuda.device_count() > 1:
        teacher_model = nn.DataParallel(teacher_model)
        student_model = nn.DataParallel(student_model)
    teacher_model = teacher_model.to(device)
    student_model = student_model.to(device)

    # Per-epoch exponential growth rate so beta goes init_beta -> end_beta
    # over `epochs` epochs: beta_t = init_beta * exp(growth_rate * t).
    growth_rate = torch.log(torch.FloatTensor(
        [end_beta / init_beta])) / torch.FloatTensor([epochs])

    # Both optimizers update teacher and student jointly, at their own LR.
    trainable_params = list(teacher_model.parameters()) + \
        list(student_model.parameters())
    optimizer_da = torch.optim.SGD(trainable_params,
                                   init_lr_da,
                                   momentum=momentum,
                                   weight_decay=weight_decay)
    optimizer_kd = torch.optim.SGD(trainable_params,
                                   init_lr_kd,
                                   momentum=momentum,
                                   weight_decay=weight_decay)

    source_dataloader = DA_datasets.office_loader(amazon, batch_size, 1)
    target_dataloader = DA_datasets.office_loader(webcam, batch_size, 1)
    target_testloader = DA_datasets.office_test_loader(webcam, test_batch_size,
                                                       1)

    logger = LoggerForSacred(None, None, True)

    grl_multi_target_hinton_alt(init_lr_da,
                                device,
                                epochs,
                                T,
                                alpha,
                                growth_rate,
                                init_beta,
                                source_dataloader,
                                target_dataloader,
                                target_testloader,
                                optimizer_da,
                                optimizer_kd,
                                teacher_model,
                                student_model,
                                logger=logger,
                                scheduler=None,
                                # fix: was a hard-coded False that silently
                                # ignored the local `is_debug` flag above
                                is_debug=is_debug)