Example 1
t_loader_test = torch.utils.data.DataLoader(t_set_test,
                                            batch_size=batch_size,
                                            shuffle=False,
                                            num_workers=num_workers)

# One shared feature extractor, a classifier per source domain, and a
# source-vs-target domain discriminator per source (three source domains here).
extractor = Extractor()
s1_classifier = Classifier(num_classes=num_classes)
s2_classifier = Classifier(num_classes=num_classes)
s3_classifier = Classifier(num_classes=num_classes)
s1_t_discriminator = Discriminator()
s2_t_discriminator = Discriminator()
s3_t_discriminator = Discriminator()

extractor.load_state_dict(
    torch.load(
        osp.join(
            MAIN_DIR,
            "MSDA/A_W_2_D_Open/bvlc_A_W_2_D/pretrain/bvlc_extractor.pth")))
extractor = nn.DataParallel(extractor)
extractor = extractor.cuda()

s1_classifier.load_state_dict(
    torch.load(
        osp.join(
            MAIN_DIR,
            "MSDA/A_W_2_D_Open/bvlc_A_W_2_D/pretrain/office-home/bvlc_s1_cls.pth"
        )))
s2_classifier.load_state_dict(
    torch.load(
        osp.join(
            MAIN_DIR,
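The examples load checkpoints into Extractor, Classifier, and Discriminator modules that are defined elsewhere in the repository. Below is a minimal sketch of the interfaces the snippets imply; the layer shapes are illustrative placeholders, not the actual BVLC/AlexNet architecture behind the bvlc_*.pth checkpoints.

import torch.nn as nn


class Extractor(nn.Module):
    # Hypothetical shared feature extractor; placeholder layers only.
    def __init__(self, feat_dim=4096):
        super().__init__()
        self.body = nn.Sequential(
            nn.Conv2d(3, 64, kernel_size=11, stride=4, padding=2),
            nn.ReLU(inplace=True),
            nn.AdaptiveAvgPool2d((6, 6)),
            nn.Flatten(),
            nn.Linear(64 * 6 * 6, feat_dim),
            nn.ReLU(inplace=True))

    def forward(self, x):
        return self.body(x)


class Classifier(nn.Module):
    # Per-source label predictor; num_classes matches the constructor calls above.
    def __init__(self, num_classes, feat_dim=4096):
        super().__init__()
        self.fc = nn.Linear(feat_dim, num_classes)

    def forward(self, feats):
        return self.fc(feats)


class Discriminator(nn.Module):
    # Binary source-vs-target domain discriminator.
    def __init__(self, feat_dim=4096):
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(feat_dim, 1024),
            nn.ReLU(inplace=True),
            nn.Linear(1024, 1))

    def forward(self, feats):
        return self.net(feats)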
Example 2
s2_loader_raw = torch.utils.data.DataLoader(s2_set, batch_size=batch_size,
    shuffle=shuffle, num_workers=num_workers)
t_loader_raw = torch.utils.data.DataLoader(t_set, batch_size=batch_size,
    shuffle=shuffle, num_workers=num_workers)
t_loader_test = torch.utils.data.DataLoader(t_set_test, batch_size=batch_size,
    shuffle=False, num_workers=num_workers)

s1_loader_raw1 = torch.utils.data.DataLoader(s1_set, batch_size=1,
    shuffle=shuffle, pin_memory=True)
s2_loader_raw1 = torch.utils.data.DataLoader(s2_set, batch_size=1,
    shuffle=shuffle, pin_memory=True)
t_loader_raw1 = torch.utils.data.DataLoader(t_set, batch_size=1,
    shuffle=shuffle, pin_memory=True)

extractor = Extractor().cpu()
extractor.load_state_dict(torch.load("/Users/bytedabce/PycharmProjects/mix_net/train_eval/pre_train_model/bvlc_extractor.pth"))
s1_classifier = Classifier(num_classes=num_classes).cpu()
s2_classifier = Classifier(num_classes=num_classes).cpu()
s1_classifier.load_state_dict(torch.load("/Users/bytedabce/PycharmProjects/mix_net/train_eval/pre_train_model/bvlc_s1_cls.pth"))
s2_classifier.load_state_dict(torch.load("/Users/bytedabce/PycharmProjects/mix_net/train_eval/pre_train_model/bvlc_s2_cls.pth"))
s1_t_discriminator = Discriminator().cpu()
s2_t_discriminator = Discriminator().cpu()




def print_log(step, epoch, epoches, lr, l1, l2, l3, l4, l5, l6, l7, l8, flag, ploter, count):
    # `steps` comes from the enclosing script's globals.
    print("Step [%d/%d] Epoch [%d/%d] lr: %f, s1_cls_loss: %.4f, s2_cls_loss: %.4f, s1_t_dis_loss: %.4f, "
          "s2_t_dis_loss: %.4f, s1_t_confusion_loss_s1: %.4f, s1_t_confusion_loss_t: %.4f, "
          "s2_t_confusion_loss_s2: %.4f, s2_t_confusion_loss_t: %.4f, selected_source: %s"
          % (step, steps, epoch, epoches, lr, l1, l2, l3, l4, l5, l6, l7, l8, flag))
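Example 2 keeps the models on CPU via .cpu(). If the bvlc_*.pth checkpoints were saved from CUDA tensors, torch.load needs map_location to remap them on a CPU-only machine. A hedged variant of the extractor load above, reusing the same checkpoint path:

import torch

# Remap CUDA-saved tensors to CPU at load time; a no-op if the checkpoint is already on CPU.
state = torch.load(
    "/Users/bytedabce/PycharmProjects/mix_net/train_eval/pre_train_model/bvlc_extractor.pth",
    map_location=torch.device("cpu"))
extractor.load_state_dict(state)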
Example 3
t_loader_raw = torch.utils.data.DataLoader(t_set,
                                           batch_size=batch_size,
                                           shuffle=shuffle,
                                           num_workers=num_workers)
t_loader_test = torch.utils.data.DataLoader(t_set_test,
                                            batch_size=batch_size,
                                            shuffle=False,
                                            num_workers=num_workers)

extractor = Extractor()
s1_classifier = Classifier(num_classes=num_classes)
s2_classifier = Classifier(num_classes=num_classes)
s1_t_discriminator = Discriminator()
s2_t_discriminator = Discriminator()

extractor.load_state_dict(
    torch.load(osp.join(MAIN_DIR, "MSDA/pretrain/office/bvlc_extractor.pth")))
extractor = nn.DataParallel(extractor)
extractor = extractor.cuda()

s1_classifier.load_state_dict(
    torch.load(osp.join(MAIN_DIR, "MSDA/pretrain/office/bvlc_s1_cls.pth")))
s2_classifier.load_state_dict(
    torch.load(osp.join(MAIN_DIR, "MSDA/pretrain/office/bvlc_s2_cls.pth")))
s1_classifier = nn.DataParallel(s1_classifier)
s2_classifier = nn.DataParallel(s2_classifier)
s1_classifier = s1_classifier.cuda()
s2_classifier = s2_classifier.cuda()

s1_t_discriminator = nn.DataParallel(s1_t_discriminator)
s1_t_discriminator = s1_t_discriminator.cuda()
s2_t_discriminator = nn.DataParallel(s2_t_discriminator)
s2_t_discriminator = s2_t_discriminator.cuda()
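Note the order in Example 3: the checkpoints are loaded into the bare modules first, and the modules are wrapped in nn.DataParallel afterwards. DataParallel prefixes parameter names with "module.", so a checkpoint saved from a wrapped model does not load directly into a bare one. A sketch of the usual workaround, where ckpt_path is a placeholder:

# Strip the "module." prefix added by nn.DataParallel before loading into a bare model.
state = torch.load(ckpt_path)
state = {k.replace("module.", "", 1): v for k, v in state.items()}
bare_extractor = Extractor()
bare_extractor.load_state_dict(state)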
Example 4
s2_loader_raw = torch.utils.data.DataLoader(s2_set,
                                            batch_size=batch_size,
                                            shuffle=shuffle,
                                            num_workers=num_workers)
t_loader_raw = torch.utils.data.DataLoader(t_set,
                                           batch_size=batch_size,
                                           shuffle=shuffle,
                                           num_workers=num_workers)
t_loader_test = torch.utils.data.DataLoader(t_set_test,
                                            batch_size=batch_size,
                                            shuffle=False,
                                            num_workers=num_workers)

extractor = Extractor().cuda(gpu_id)
extractor.load_state_dict(
    torch.load(
        "/home/xuruijia/ZJY/ADW/bvlc_A_W_2_D/pretrain/bvlc_extractor.pth"))
s1_classifier = Classifier(num_classes=num_classes).cuda(gpu_id)
s2_classifier = Classifier(num_classes=num_classes).cuda(gpu_id)
s1_classifier.load_state_dict(
    torch.load("/home/xuruijia/ZJY/ADW/bvlc_A_W_2_D/pretrain/bvlc_s1_cls.pth"))
s2_classifier.load_state_dict(
    torch.load("/home/xuruijia/ZJY/ADW/bvlc_A_W_2_D/pretrain/bvlc_s2_cls.pth"))
s1_t_discriminator = Discriminator().cuda(gpu_id)
s2_t_discriminator = Discriminator().cuda(gpu_id)


def print_log(step, epoch, epoches, lr, l1, l2, l3, l4, l5, l6, l7, l8, flag,
              ploter, count):
    print("Step [%d/%d] Epoch [%d/%d] lr: %f, s1_cls_loss: %.4f, s2_cls_loss: %.4f, s1_t_dis_loss: %.4f, "
          "s2_t_dis_loss: %.4f, s1_t_confusion_loss_s1: %.4f, s1_t_confusion_loss_t: %.4f, "
          "s2_t_confusion_loss_s2: %.4f, s2_t_confusion_loss_t: %.4f, selected_source: %s"
          % (step, steps, epoch, epoches, lr, l1, l2, l3, l4, l5, l6, l7, l8, flag))
Example 5

target_features, target_gas_labels, target_liquid_labels = info_transfer2FloatTensor(
    target_features, target_labels)

source_training_liquid_labels = source_training_liquid_labels / math.sqrt(1000)
source_validation_liquid_labels = source_validation_liquid_labels / math.sqrt(1000)
target_liquid_labels = target_liquid_labels / math.sqrt(1000)
length_target = target_gas_labels.size()[0]

target_dataset = Data.TensorDataset(target_features, target_liquid_labels)
Batch_size = 128
target_loader = Data.DataLoader(dataset=target_dataset, batch_size=Batch_size,
                                shuffle=False, num_workers=2)

E = Extractor()
R = Regressor()
E.load_state_dict(torch.load('E_l2.pkl'))
R.load_state_dict(torch.load('R_l2.pkl'))
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
E.to(device)
R.to(device)
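# `predict` is defined elsewhere in this project; the sketch below shows what the call
# further down assumes (batch-wise inference with extractor E and regressor R, returning
# stacked predictions and reference labels). It is an assumption, not the original helper.
def predict(loader, E, R):
    E.eval()
    R.eval()
    preds, refs = [], []
    with torch.no_grad():
        for features, labels in loader:
            preds.append(R(E(features.to(device))).cpu())
            refs.append(labels)
    return torch.cat(preds), torch.cat(refs)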
prediction_target, reference_target = predict(target_loader, E, R)

#plot
standard_line_x = [0, 6500]
standard_line_y = [0, 6500]
error_line_x_p500 = [0, 6000]
error_line_y_p500 = [500, 6500]
error_line_x_n500 = [500, 6500]
error_line_y_n500 = [0, 6000]
l_standard = plt.plot(standard_line_x, standard_line_y, 'k-', label='standard')
l_p500 = plt.plot(error_line_x_p500, error_line_y_p500, ':', color='lime', label=r'$\pm500kg/h$')
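A hedged continuation of the plot above: scatter the target predictions against their reference values and finish the figure. The axis labels are assumptions, and any rescaling back from the sqrt(1000) normalization applied earlier is omitted.

# Hypothetical plot completion: predicted vs. reference liquid labels plus the guide lines above.
plt.scatter(reference_target.numpy(), prediction_target.numpy(), s=8, alpha=0.5, label='target samples')
plt.xlabel('reference liquid flow')
plt.ylabel('predicted liquid flow')
plt.legend()
plt.show()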