Example #1
import numpy as np
import torch.optim as optim
from torch.utils.data import DataLoader
from torch.utils.data.distributed import DistributedSampler
from tqdm import tqdm


def pretrain(run_id, model, dataset, device, args):
    # Build the optimizer selected on the command line.
    if args.optimizer == 'sgd':
        optimizer = optim.SGD(model.parameters(),
                              lr=args.lr,
                              weight_decay=args.wd,
                              momentum=args.momentum)
    elif args.optimizer == 'adam':
        optimizer = optim.Adam(model.parameters(),
                               lr=args.lr,
                               weight_decay=args.wd,
                               betas=(0.9, 0.98),
                               eps=1e-09,
                               amsgrad=True)
    else:
        raise ValueError('Invalid optimizer!')

    if args.use_dist:
        # DistributedSampler shards and shuffles the data across processes,
        # so the DataLoader itself must not shuffle.
        sampler = DistributedSampler(dataset, shuffle=True)
        data_loader = DataLoader(dataset,
                                 batch_size=args.batch_size,
                                 num_workers=args.num_workers,
                                 shuffle=False,
                                 pin_memory=True,
                                 drop_last=True,
                                 sampler=sampler)
    else:
        data_loader = DataLoader(dataset,
                                 batch_size=args.batch_size,
                                 num_workers=args.num_workers,
                                 shuffle=True,
                                 pin_memory=True,
                                 drop_last=True)

    model.train()
    for epoch in range(args.pretrain_epochs):
        losses = []
        if args.use_dist:
            # Re-seed the sampler so each epoch sees a different shuffle order.
            data_loader.sampler.set_epoch(epoch)
        adjust_learning_rate(optimizer, args.lr, epoch, args.pretrain_epochs,
                             args)
        with tqdm(data_loader,
                  desc=f'EPOCH [{epoch + 1}/{args.pretrain_epochs}]'
                  ) as progress_bar:
            for x1, _, x2, __ in progress_bar:
                # The loader yields two views of each sample; the model takes
                # both and returns the pretraining loss directly.
                x1 = x1.cuda(device, non_blocking=True)
                x2 = x2.cuda(device, non_blocking=True)

                loss = model(x1, x2)

                optimizer.zero_grad()
                loss.backward()
                optimizer.step()

                losses.append(loss.item())

                # This model exposes no accuracy, so only the running mean
                # loss is reported.
                progress_bar.set_postfix({'Loss': np.mean(losses)})
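Both examples call a project-local helper, adjust_learning_rate, whose body is not shown on this page. Below is a minimal sketch of what such a helper commonly looks like, assuming a cosine-annealing schedule and ignoring the extra args parameter; the project's actual implementation may differ.

import math

def adjust_learning_rate(optimizer, base_lr, epoch, total_epochs, args):
    # Hypothetical cosine schedule: decay the learning rate from base_lr
    # toward zero over the course of pretraining, updating every parameter
    # group in place.
    lr = base_lr * 0.5 * (1.0 + math.cos(math.pi * epoch / total_epochs))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr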
Example #2
File: cpc.py Project: Y-Kanan/MME
import numpy as np
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader
from tqdm import tqdm


def pretrain(run_id, model, dataset, device, args):
    # Build the optimizer selected on the command line.
    if args.optimizer == 'sgd':
        optimizer = optim.SGD(model.parameters(),
                              lr=args.lr,
                              weight_decay=args.wd,
                              momentum=args.momentum)
    elif args.optimizer == 'adam':
        optimizer = optim.Adam(model.parameters(),
                               lr=args.lr,
                               weight_decay=args.wd,
                               betas=(0.9, 0.98),
                               eps=1e-09,
                               amsgrad=True)
    else:
        raise ValueError('Invalid optimizer!')

    criterion = nn.CrossEntropyLoss().cuda(device)

    data_loader = DataLoader(dataset,
                             batch_size=args.batch_size,
                             num_workers=args.num_workers,
                             shuffle=True,
                             pin_memory=True,
                             drop_last=True)

    model.train()
    for epoch in range(args.pretrain_epochs):
        losses = []
        accuracies = []
        adjust_learning_rate(optimizer, args.lr, epoch, args.pretrain_epochs,
                             args)
        with tqdm(data_loader,
                  desc=f'EPOCH [{epoch + 1}/{args.pretrain_epochs}]'
                  ) as progress_bar:
            for x, _ in progress_bar:
                x = x.cuda(device, non_blocking=True)

                # The model returns prediction logits and the matching
                # targets, so the contrastive objective is optimized as a
                # cross-entropy classification problem.
                output, target = model(x)

                loss = criterion(output, target)
                acc = logits_accuracy(output, target, topk=(1,))[0]
                accuracies.append(acc)

                optimizer.zero_grad()
                loss.backward()
                optimizer.step()

                losses.append(loss.item())

                progress_bar.set_postfix({
                    'Loss': np.mean(losses),
                    'Acc': np.mean(accuracies)
                })
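logits_accuracy is another project helper that is not reproduced on this page. The sketch below shows a standard top-k accuracy computation that matches how the helper is called above; the name, signature, and return format are assumptions, not the project's exact code.

import torch

def logits_accuracy(output, target, topk=(1,)):
    # Hypothetical top-k accuracy helper: for each k in `topk`, return the
    # fraction of samples whose true class is among the k highest-scoring
    # logits. `output` is (batch, classes), `target` is (batch,).
    maxk = max(topk)
    _, pred = output.topk(maxk, dim=1, largest=True, sorted=True)
    pred = pred.t()
    correct = pred.eq(target.view(1, -1).expand_as(pred))
    results = []
    for k in topk:
        correct_k = correct[:k].reshape(-1).float().sum(0)
        results.append((correct_k / target.size(0)).item())
    return results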