Example #1
    def __init__(
        self,
        sorter_type,
        seq_len=None,
        sorter_state_dict=None,
    ):
        super(RankLoss, self).__init__()
        self.sorter = model.model_loader(sorter_type, seq_len,
                                         sorter_state_dict)
Example #2

    def __init__(self, sorter_type, seq_len=None, sorter_state_dict=None, lbd=0, t_easy=0.05):
        super(SpearmanLoss, self).__init__()
        self.sorter = model_loader(sorter_type, seq_len, sorter_state_dict)

        self.criterion_mse = torch.nn.MSELoss(reduction='none')
        self.criterionl1 = torch.nn.L1Loss()

        self.lbd = lbd
        self.t_easy = t_easy
Example #3
    def __init__(self,
                 sorter_type,
                 seq_len=None,
                 sorter_state_dict=None,
                 margin=0.2,
                 nmax=1):
        super(RankHardLoss, self).__init__()
        self.nmax = nmax
        self.margin = margin

        self.sorter = model.model_loader(sorter_type, seq_len,
                                         sorter_state_dict)
Example #4
    def __init__(self,
                 sorter_type,
                 seq_len=None,
                 sorter_state_dict=None,
                 lbd=0):
        super(SpearmanLoss, self).__init__()
        self.sorter = model.model_loader(sorter_type, seq_len,
                                         sorter_state_dict)

        self.criterion_mse = torch.nn.MSELoss()
        self.criterionl1 = torch.nn.L1Loss()

        self.lbd = lbd
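
The constructors in examples #1 to #4 only load the pretrained differentiable sorter and set up the criteria; for context, the sketch below shows one way a forward pass could combine them into a Spearman surrogate. The get_rank helper, the sorter's call signature, and the exact weighting of the L1 term are illustrative assumptions, not the repository's implementation.

import torch
import torch.nn as nn


def get_rank(values):
    # Exact (non-differentiable) rank of each element via a double argsort;
    # used only on the ground-truth side.
    return torch.argsort(torch.argsort(values, dim=-1), dim=-1).float()


class SpearmanSurrogate(nn.Module):
    """Illustrative Spearman surrogate built around a pretrained
    differentiable sorter (any callable mapping a score vector to soft ranks)."""

    def __init__(self, sorter, lbd=0.0):
        super().__init__()
        self.sorter = sorter
        self.criterion_mse = nn.MSELoss()
        self.criterionl1 = nn.L1Loss()
        self.lbd = lbd

    def forward(self, pred, target):
        # Soft ranks of the predictions from the learned sorter (assumed to
        # take a batch of score vectors), exact ranks of the targets.
        rank_pred = self.sorter(pred.unsqueeze(0)).view(-1)
        rank_target = get_rank(target)
        # Rank-matching term plus an optional lbd-weighted L1 term on the raw
        # scores, mirroring the attributes set up in the constructors above.
        return (self.criterion_mse(rank_pred, rank_target)
                + self.lbd * self.criterionl1(pred, target))
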
Example #5
    dset = dataset.SeqDataset(args.seq_len, dist=args.dist)

    # 90/10 split of the same dataset: this sampler draws the indices past the
    # first 10% for training, the one below draws the first 10% for validation.
    train_loader = DataLoader(dset,
                              batch_size=args.batch_size,
                              shuffle=False,
                              num_workers=2,
                              sampler=SubsetRandomSampler(
                                  range(int(len(dset) * 0.1), len(dset))))
    val_loader = DataLoader(dset,
                            batch_size=args.batch_size,
                            shuffle=False,
                            num_workers=2,
                            sampler=SubsetRandomSampler(
                                range(int(len(dset) * 0.1))))

    # Note: this rebinds the name `model` from the imported module to the
    # network instance returned by model_loader.
    model = model.model_loader(args.model_type, args.seq_len)
    model.to(device)

    optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)
    # Halve the learning rate every args.lr_steps epochs.
    lr_scheduler = StepLR(optimizer, args.lr_steps, 0.5)

    criterion = nn.L1Loss()

    print("Nb parameters:", utils.count_parameters(model))

    # Bookkeeping for early stopping on the validation error.
    start_epoch = 0
    epochs_without_improvement = 0
    patience = 2
    best_rec = 10000
    for epoch in range(start_epoch, args.mepoch):
        is_best = False
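        # --- Hypothetical continuation (illustrative sketch, not the original
        # code): the snippet is truncated here, so the loop body below only
        # shows how the bookkeeping set up above (patience, best_rec,
        # epochs_without_improvement, is_best) is typically used. It assumes
        # the dataset yields (input, target) pairs.
        model.train()
        for x, y in train_loader:
            x, y = x.to(device), y.to(device)
            optimizer.zero_grad()
            loss = criterion(model(x), y)
            loss.backward()
            optimizer.step()
        lr_scheduler.step()

        model.eval()
        val_err, n_batches = 0.0, 0
        with torch.no_grad():
            for x, y in val_loader:
                x, y = x.to(device), y.to(device)
                val_err += criterion(model(x), y).item()
                n_batches += 1
        val_err /= max(n_batches, 1)

        # Keep the best validation error and stop early after `patience`
        # epochs without improvement.
        if val_err < best_rec:
            best_rec, is_best = val_err, True
            epochs_without_improvement = 0
        else:
            epochs_without_improvement += 1
            if epochs_without_improvement >= patience:
                break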