Example #1
print('==> Training', args.student if args.name is None else args.name)
print('==> Building model..')

# build the teacher model(s) and the student model
teachers, student = get_model(args, config, device="cuda")

print("==> Teacher(s): ", " ".join([teacher.__name__ for teacher in teachers]))
print("==> Student: ", args.student)

# args.out_layer is a string of layer indices (e.g. "[1, 2, 3]"); pick the matching
# student feature dimensions for the discriminators
dims = [student.out_dims[i] for i in eval(args.out_layer)]
print("dims:", dims)

# optimizer parameter groups: the student first; discriminator groups (if args.adv) are appended below
update_parameters = [{'params': student.parameters()}]

if args.adv:
    # one discriminator per selected feature dimension (adversarial distillation)
    discriminators = discriminator.Discriminators(dims, grl=args.grl)
    for i, d in enumerate(discriminators.discriminators):
        d = d.to(device)
        if device == "cuda":
            d = torch.nn.DataParallel(d)
        # write the (possibly DataParallel-wrapped) module back so it is actually used,
        # and train it with its own learning rate
        discriminators.discriminators[i] = d
        update_parameters.append({'params': d.parameters(), "lr": args.d_lr})

print(args)

if args.resume:
    # Load checkpoint.
    print('==> Resuming from checkpoint..')
    ckpt_path = ('/workspace/mnt/storage/yangdecheng/yangdecheng/models/checkpoint/%s-generator/ckpt.t7'
                 % "_".join(args.teachers))
    assert os.path.isfile(ckpt_path), 'Error: no checkpoint found at %s!' % ckpt_path
    checkpoint = torch.load(ckpt_path)
Example #2
print('==> Training', args.student if args.name is None else args.name)
print('==> Building model..')

# build the teacher model(s) and the student model
teachers, student = get_model(args, config, device="cuda")

print("==> Teacher(s): ", " ".join([teacher.__name__ for teacher in teachers]))
print("==> Student: ", args.student)

# args.out_layer is a string of layer indices (e.g. "[1, 2, 3]"); pick the matching
# student feature dimensions for the discriminators
dims = [student.out_dims[i] for i in eval(args.out_layer)]
print("dims:", dims)

# optimizer parameter groups: the student first; discriminator groups (if args.adv) are appended below
update_parameters = [{'params': student.parameters()}]

if args.adv:
    # one discriminator per selected feature dimension (adversarial distillation)
    discriminators = discriminator.Discriminators(dims)
    for i, d in enumerate(discriminators.discriminators):
        d = d.to(device)
        if device == "cuda":
            d = torch.nn.DataParallel(d)
        # write the (possibly DataParallel-wrapped) module back so it is actually used,
        # and train it with its own learning rate
        discriminators.discriminators[i] = d
        update_parameters.append({'params': d.parameters(), "lr": args.d_lr})

print(args)

if args.resume:
    # Load checkpoint.
    print('==> Resuming from checkpoint..')
    assert os.path.isdir('checkpoint'), 'Error: no checkpoint directory found!'
    checkpoint = torch.load('./checkpoint/%s-generator/ckpt.t7' % "_".join(args.teachers))
    # restore the student weights and continue counting epochs from the checkpoint
    student.load_state_dict(checkpoint['net'])
    start_epoch = checkpoint['epoch']
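
discriminator.Discriminators itself is not part of this excerpt; the dims list and the grl
flag in Example #1 suggest one small discriminator per selected feature dimension with an
optional gradient reversal layer (GRL), so the student is trained to fool the discriminators
while they learn to separate teacher from student features. A minimal, purely illustrative
sketch of such a reversal layer (GradReverse and grad_reverse are hypothetical names, not
the repo's API):

import torch

class GradReverse(torch.autograd.Function):
    # Identity in the forward pass; scales the gradient by -lambd in the backward pass.
    @staticmethod
    def forward(ctx, x, lambd=1.0):
        ctx.lambd = lambd
        return x.view_as(x)

    @staticmethod
    def backward(ctx, grad_output):
        # One gradient per forward input: x gets the reversed gradient, lambd gets none.
        return -ctx.lambd * grad_output, None

def grad_reverse(x, lambd=1.0):
    return GradReverse.apply(x, lambd)
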