Example #1
import os
import torch
import torchvision.transforms as transforms
# OfficeImage, ResBase50, ResClassifier, print_args and get_dataset_length
# come from the repository's own modules (exact import paths not shown here).
print_args(args)

result = open(os.path.join(args.result, "OfficeHome_HAFN_" + args.task + '_' + args.post + '.' + args.repeat +  "_score.txt"), "a")

t_root = os.path.join(args.data_root)
t_label = os.path.join(args.data_root, args.target + "_shared.txt")

data_transform = transforms.Compose([
    transforms.Resize((256, 256)),  # transforms.Scale is deprecated; Resize is the current name
    transforms.CenterCrop((224, 224)),
    transforms.ToTensor(),
    transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])
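# Quick sanity check (not in the original example): the composed pipeline maps
# any RGB image to a normalized 3x224x224 tensor; the probe size is arbitrary.
from PIL import Image
probe = Image.new("RGB", (500, 375))
assert data_transform(probe).shape == (3, 224, 224)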

t_set = OfficeImage(t_root, t_label, data_transform)
assert len(t_set) == get_dataset_length(args.target + '_shared')

t_loader = torch.utils.data.DataLoader(t_set, batch_size=args.batch_size,
    shuffle=args.shuffle, num_workers=args.num_workers)

netG = ResBase50().cuda()
netF = ResClassifier(class_num=args.class_num, extract=False).cuda()
netG.eval()
netF.eval()

for epoch in range(args.epoch // 2, args.epoch + 1):  # integer division: range() needs ints in Python 3
    if epoch % 10 != 0:
        continue
    netG.load_state_dict(torch.load(os.path.join(args.snapshot, "OfficeHome_HAFN_" + args.task + "_netG_" + args.post + "." + args.repeat + "_" + str(epoch) + ".pth")))
    netF.load_state_dict(torch.load(os.path.join(args.snapshot, "OfficeHome_HAFN_" + args.task + "_netF_" + args.post + "." + args.repeat + "_" + str(epoch) + ".pth")))
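    # The excerpt stops here; a minimal sketch (an assumption, not the original
    # code) of the evaluation that typically follows, assuming t_loader yields
    # (image, label) pairs and netF returns class logits when extract=False.
    correct, total = 0, 0
    with torch.no_grad():                           # inference only
        for img, label in t_loader:
            img, label = img.cuda(), label.cuda()
            pred = netF(netG(img)).max(dim=1)[1]    # highest-scoring class
            correct += (pred == label).sum().item()
            total += label.size(0)
    result.write("epoch %s: %.4f\n" % (epoch, float(correct) / total))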
Example #2
import os
import torch
# OfficeImage comes from the repository's own modules; data_root, batch_size,
# shuffle and num_workers are read from args earlier in the script.
beta2 = args.beta2
gpu_id = args.gpu_id
num_classes = args.num_classes
threshold = args.threshold
log_interval = args.log_interval
cls_epoches = args.cls_epoches
gan_epoches = args.gan_epoches
alpha = args.alpha

s1_root = os.path.join(data_root, args.s1, "images")
s1_label = os.path.join(data_root, args.s1, "label.txt")
s2_root = os.path.join(data_root, args.s2, "images")
s2_label = os.path.join(data_root, args.s2, "label.txt")
t_root = os.path.join(data_root, args.t, "images")
t_label = os.path.join(data_root, args.t, "label.txt")
s1_set = OfficeImage(s1_root, s1_label, split="train")
s2_set = OfficeImage(s2_root, s2_label, split="train")
t_set = OfficeImage(t_root, t_label, split="train")
t_set_test = OfficeImage(t_root, t_label, split="test")


s1_loader_raw = torch.utils.data.DataLoader(s1_set, batch_size=batch_size,
    shuffle=shuffle, num_workers=num_workers)
s2_loader_raw = torch.utils.data.DataLoader(s2_set, batch_size=batch_size,
    shuffle=shuffle, num_workers=num_workers)
t_loader_raw = torch.utils.data.DataLoader(t_set, batch_size=batch_size,
    shuffle=shuffle, num_workers=num_workers)
t_loader_test = torch.utils.data.DataLoader(t_set_test, batch_size=batch_size,
    shuffle=False, num_workers=num_workers)

s1_loader_raw1 = torch.utils.data.DataLoader(s1_set, batch_size=1,
    shuffle=shuffle, num_workers=num_workers)  # assumed arguments, following the loaders above
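# Hypothetical sketch (not the original code): with loaders like these, target
# samples are often pseudo-labeled by keeping predictions whose softmax
# confidence clears `threshold`; netG/netF are borrowed from the other
# examples purely for illustration.
selected = []
with torch.no_grad():
    for img, _ in t_loader_raw:
        logit = netF(netG(img.cuda()))
        conf, pseudo = torch.softmax(logit, dim=1).max(dim=1)
        keep = conf >= threshold                    # boolean mask per batch
        selected.extend(zip(img[keep.cpu()], pseudo[keep].cpu().tolist()))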
Example #3
import os
import torch
import torchvision.transforms as transforms
# OfficeImage, ResBase50, ResClassifier, print_args and get_dataset_length
# come from the repository's own modules (exact import paths not shown here).
print_args(args)

source_root = os.path.join(args.data_root)
source_label = os.path.join(args.data_root, args.source + ".txt")
target_root = os.path.join(args.data_root)
target_label = os.path.join(args.data_root, args.target + "_shared.txt")

train_transform = transforms.Compose([
    transforms.Resize((256, 256)),  # transforms.Scale is deprecated; Resize is the current name
    transforms.RandomCrop((224, 224)),
    transforms.RandomHorizontalFlip(),
    transforms.ToTensor(),
    transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])

source_set = OfficeImage(source_root, source_label, train_transform)
target_set = OfficeImage(target_root, target_label, train_transform)

assert len(source_set) == get_dataset_length(args.source)
assert len(target_set) == get_dataset_length(args.target + '_shared')

source_loader = torch.utils.data.DataLoader(source_set, batch_size=args.batch_size,
    shuffle=args.shuffle, num_workers=args.num_workers)
target_loader = torch.utils.data.DataLoader(target_set, batch_size=args.batch_size,
    shuffle=args.shuffle, num_workers=args.num_workers)

netG = ResBase50().cuda()
netF = ResClassifier(class_num=args.class_num, extract=args.extract, dropout_p=args.dropout_p).cuda()
netF.apply(weights_init)
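# weights_init is referenced above but not shown in this excerpt. A typical
# implementation for this kind of classifier head (an assumption, not the
# repository's actual helper):
import torch.nn as nn

def weights_init(m):
    classname = m.__class__.__name__
    if classname.find("Linear") != -1:
        nn.init.normal_(m.weight, 0.0, 0.01)   # small-variance Gaussian weights
        nn.init.constant_(m.bias, 0.0)
    elif classname.find("BatchNorm") != -1:
        nn.init.normal_(m.weight, 1.0, 0.01)
        nn.init.constant_(m.bias, 0.0)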