Example #1
def experiment_run(args):
    device = 'cuda' if torch.cuda.is_available() else 'cpu'

    trainloader, testloader = get_dataloaders(args)

    auto_change_dir(args.exp_name)

    print("Using device", device)  # todo: cambiar a logger

    # This will download a model with its weights. To use another model, just instantiate it here.
    teacher = load_model(args.teacher, trainable=False, device=device)
    student = load_model(args.student)

    teacher.eval()
    student.train()

    best_acc = 0
    start_epoch = 0

    feat_loss = parse_distillation_loss(args)
    kd_loss = last_layer_loss_parser(args.log_dist, string_input=True)

    eval_criterion = torch.nn.CrossEntropyLoss()
    optimizer = optim.Adam(student.parameters(),
                           lr=args.lr)  # TODO: consider moving this into the experiment
    flatten = args.student.split("_")[0] == "linear"
    layer = args.layer
    idxs = [layer]
    auto_change_dir(",".join([str(i) for i in idxs]))

    writer = SummaryWriter("tb_logs")  # todo mover dentro de exp

    exp = FeatureExperiment(device=device,
                            student=student,
                            teacher=teacher,
                            optimizer=optimizer,
                            kd_criterion=kd_loss,
                            ft_criterion=feat_loss,
                            eval_criterion=eval_criterion,
                            linear=flatten,
                            writer=writer,
                            testloader=testloader,
                            trainloader=trainloader,
                            best_acc=best_acc,
                            idxs=idxs,
                            use_regressor=args.feat_dist == "hint",
                            args=args)
    if exp.epoch + 1 < args.epochs:
        print("training", exp.epoch, "-", args.epochs)
        for epoch in range(exp.epoch, args.epochs):
            exp.train_epoch()
            exp.test_epoch()
        exp.save_model()
    else:
        print("epochs surpassed")
Example #2
def main(args):

    device = 'cuda' if torch.cuda.is_available() else 'cpu'

    trainloader, testloader = get_dataloaders(args.batch_size)
    auto_change_dir(args.exp_name)

    teacher = load_model(args.teacher, trainable=False, device=device)
    student = load_model(args.student)

    teacher.eval()
    student.train()

    best_acc = 0
    start_epoch = 0

    criterion = parse_distillation_loss(args)  # builds the distillation criterion from the CLI arguments
    eval_criterion = torch.nn.CrossEntropyLoss()
    optimizer = optim.Adam(student.parameters(), lr=args.lr)

    flatten = args.student.split("_")[0] == "linear"

    writer = SummaryWriter("tb_logs")
    exp = DistillationExperiment(
        device=device,  # TODO: move this up
        student=student,
        teacher=teacher,
        optimizer=optimizer,
        criterion=criterion,
        eval_criterion=eval_criterion,
        linear=flatten,
        writer=writer,
        testloader=testloader,
        trainloader=trainloader,
        best_acc=best_acc,
        args=args)

    for epoch in range(start_epoch, args.epochs):

        exp.train_epoch()
        exp.test_epoch()
    exp.save_model()
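parse_distillation_loss is a project-specific factory whose implementation is not shown here. For orientation only, a classic logit-distillation criterion of the kind such a factory commonly returns is the Hinton-style KD loss sketched below (temperature-softened KL plus hard-label cross-entropy). Treat it as an assumption, not the project's actual loss.

import torch.nn.functional as F

def hinton_kd_loss(student_logits, teacher_logits, targets, T=4.0, alpha=0.9):
    # Soft targets: KL between temperature-softened distributions, scaled by T^2
    # so its gradient magnitude stays comparable to the hard-label term.
    soft = F.kl_div(F.log_softmax(student_logits / T, dim=1),
                    F.softmax(teacher_logits / T, dim=1),
                    reduction="batchmean") * (T * T)
    # Hard targets: ordinary cross-entropy against the ground-truth labels.
    hard = F.cross_entropy(student_logits, targets)
    return alpha * soft + (1.0 - alpha) * hard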
Example #3
def main(args):
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    print("using device", device)

    #trainloader, testloader, classes = load_cifar10(args)

    trainloader, testloader = get_dataloaders(args.batch_size)
    auto_change_dir(args.exp_name)

    net = load_model(args.model, trainable=False, device=device)
    best_acc = 0
    start_epoch = 0

    writer = SummaryWriter("teacher_trainer")
    criterion = nn.CrossEntropyLoss()
    #optimizer = optim.SGD(net.parameters(), lr=args.lr, momentum=0.9, weight_decay=5e-4)
    optimizer = optim.Adam(net.parameters(), lr=args.lr)

    exp = Experiment(device=device,
                     net=net,
                     optimizer=optimizer,
                     criterion=criterion,
                     linear=args.model.split("_")[0] == "linear",
                     writer=writer,
                     testloader=testloader,
                     trainloader=trainloader,
                     best_acc=best_acc,
                     start_epoch=start_epoch)

    exp.test_epoch()
    exp.save_model(save_checkpoints=False,
                   overwrite_record=True,
                   overwrite_epoch=True)
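Experiment.test_epoch and Experiment.save_model are defined elsewhere in the repository. As a sketch of what the evaluation step presumably does, the loop below computes average test loss and top-1 accuracy for a frozen model; the function name and structure are assumptions.

import torch

def evaluate(net, testloader, criterion, device):
    # Run the network over the test set without gradients and report
    # average loss and top-1 accuracy.
    net.eval()
    total, correct, loss_sum = 0, 0, 0.0
    with torch.no_grad():
        for inputs, targets in testloader:
            inputs, targets = inputs.to(device), targets.to(device)
            outputs = net(inputs)
            loss_sum += criterion(outputs, targets).item() * targets.size(0)
            correct += outputs.argmax(dim=1).eq(targets).sum().item()
            total += targets.size(0)
    return loss_sum / total, correct / total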
Example #4
    # Optionally remove the registered hooks once the summary has been collected:
    # for h in hooks:
    #     h.remove()

    print(summary)

    return model, summary


if __name__ == '__main__':
    from lib.utils.imagenet.utils import load_model


    device = 'cuda' if torch.cuda.is_available() else 'cpu'

    model = load_model("MobileNetV2")

    # Summarize the network and capture the stored activations for "ReLU-39".
    model, s = summary(model, (3, 224, 224), "ReLU-39")
    print(s["ReLU-39"].mean())

    # Make a forward pass with a new batch and inspect the activations again.
    x = [torch.rand(2, 3, 224, 224) + 1]
    model(*x)

    print(s["ReLU-39"].mean())