Example #1
    def _gaussian_regularization(self,
                                 model: Model,
                                 tf1_images,
                                 tf1_pred_simplex: List[Tensor],
                                 head_name="B") -> Tensor:
        """
        Compute prediction simplices on Gaussian-noised tf1 images and return the KL
        divergence between them and the original prediction simplices.
        :param tf1_images: tf1-transformed images
        :param tf1_pred_simplex: list of prediction simplices for the tf1-transformed images
        :return: averaged KL regularization loss
        """
        # add Gaussian noise to the tf1 images and predict with the chosen head
        _tf1_images_gaussian = self.gaussian_adder(tf1_images)
        _tf1_gaussian_simplex = model.torchnet(_tf1_images_gaussian,
                                               head=head_name)
        assert assert_list(simplex, tf1_pred_simplex)
        assert assert_list(simplex, _tf1_gaussian_simplex)
        assert len(tf1_pred_simplex) == len(_tf1_gaussian_simplex)
        # KL divergence between noised and detached clean predictions, averaged over sub-heads
        reg_loss = []
        for __tf1_simplex, __tf1_gaussian_simplex in zip(
                tf1_pred_simplex, _tf1_gaussian_simplex):
            reg_loss.append(
                self.kl_div(__tf1_gaussian_simplex, __tf1_simplex.detach()))
        return sum(reg_loss) / len(reg_loss)  # type: ignore
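
# --- A minimal, self-contained sketch of the same Gaussian-noise consistency idea.
# It avoids the Model / gaussian_adder / kl_div wrappers used above; the function
# name, the noise level `sigma`, and the argument order of the KL term are
# assumptions, not the project's exact API.
import torch
import torch.nn.functional as F


def gaussian_consistency_loss(net: torch.nn.Module,
                              images: torch.Tensor,
                              clean_simplex: torch.Tensor,
                              sigma: float = 0.1) -> torch.Tensor:
    """KL divergence between predictions on Gaussian-noised and clean images."""
    noised_images = images + sigma * torch.randn_like(images)  # additive Gaussian noise
    noised_log_simplex = F.log_softmax(net(noised_images), dim=1)
    # F.kl_div(log_q, p) computes KL(p || q); the clean prediction is detached,
    # mirroring the .detach() in _gaussian_regularization above.
    return F.kl_div(noised_log_simplex, clean_simplex.detach(), reduction="batchmean")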
Example #2
            orig_gradient_y, pred_gradient_y
        )
        return self.mse(pred, gt) + self.gdl_weight * gdl_loss
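
# --- The gradient-difference loss class is shown only from its last lines above.
# Below is a minimal, self-contained sketch of a gradient-difference + MSE loss,
# using finite-difference image gradients on (N, C, H, W) tensors. The class name
# and the exact gradient/reduction choices are assumptions, not the project's code.
import torch
from torch import nn


class GradientDifferenceMSE(nn.Module):  # hypothetical name
    def __init__(self, gdl_weight: float = 1.0):
        super().__init__()
        self.gdl_weight = gdl_weight
        self.mse = nn.MSELoss()

    @staticmethod
    def _image_gradients(x: torch.Tensor):
        # finite differences along height (y) and width (x)
        grad_y = x[:, :, 1:, :] - x[:, :, :-1, :]
        grad_x = x[:, :, :, 1:] - x[:, :, :, :-1]
        return grad_x, grad_y

    def forward(self, pred: torch.Tensor, gt: torch.Tensor) -> torch.Tensor:
        pred_gx, pred_gy = self._image_gradients(pred)
        gt_gx, gt_gy = self._image_gradients(gt)
        gdl_loss = (pred_gx - gt_gx).abs().mean() + (pred_gy - gt_gy).abs().mean()
        return self.mse(pred, gt) + self.gdl_weight * gdl_loss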


img_transform = transforms.Compose(
    [
        transforms.ToTensor(),
        # transforms.Normalize((0.5), (0.5))
    ]
)
dataset = MNIST(DATA_PATH, transform=img_transform)
dataloader = DataLoader(dataset, batch_size=128, shuffle=True)

model = Model()
model.torchnet = autoencoder()
model.optimizer = torch.optim.Adam(
    model.torchnet.parameters(), lr=1e-3, weight_decay=1e-5
)

config = ConfigManger().parsed_args
if config["loss"] == "mse":
    criterion = nn.MSELoss()
elif config["loss"] == "gdl":
    criterion = gradient_difference_loss(config["weight"])
else:
    raise ValueError(f"unknown loss: {config['loss']}")

trainer = MNISTTrainer(
    model=model,
    train_loader=dataloader,
    val_loader=dataloader,
    criterion=criterion,  # use the loss selected from the config above
Example #3
    dataloader_params=dataloader_params,
)
prior = get_prior_from_dataset(unlabeled_loader.dataset)
print("prior for unlabeled dataset", prior)
# network part
import warnings

fix_all_seed(int(config.get("Seed", 0)))

with warnings.catch_warnings():
    warnings.filterwarnings("ignore")
    net = SimpleNet(1, len(unlabeled_class_sample_nums))
    optim = RAdam(net.parameters(), lr=1e-4, weight_decay=1e-4)
    scheduler = MultiStepLR(optim, milestones=[50, 80], gamma=0.2)
    model = Model()
    model.torchnet = net
    model.optimizer = optim
    model.scheduler = scheduler

# trainer part
Trainer = {
    "SemiTrainer": SemiTrainer,
    "SemiEntropyTrainer": SemiEntropyTrainer,
    "SemiPrimalDualTrainer": SemiPrimalDualTrainer,
    "SemiWeightedIICTrainer": SemiWeightedIICTrainer,
    "SemiUDATrainer": SemiUDATrainer,
}.get(config["Trainer"]["name"])
assert Trainer, f"unknown trainer: {config['Trainer']['name']}"

trainer = Trainer(
    model,