Example #1
    print("[INFO] Setting SEED: None")

if not torch.cuda.is_available():
    print("[WARNING] CUDA is not available.")

if args.finetune in ("True", "true"):
    print("[INFO] Finetune set to True, the backbone will be finetuned.")
print("[INFO] Found", str(torch.cuda.device_count()), "GPU(s) available.")
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print("[INFO] Device type:", str(device))

from datamanager import DataManager

manager = DataManager(args.seed)
num_classes = manager.get_num_classes(args.dataset)
train_transform = manager.get_train_transforms("lineval", args.dataset)
train_loader, _ = manager.get_train_loader(dataset=args.dataset,
                                           data_type="single",
                                           data_size=args.data_size,
                                           train_transform=train_transform,
                                           repeat_augmentations=None,
                                           num_workers=args.num_workers,
                                           drop_last=False)

test_loader = manager.get_test_loader(args.dataset, args.data_size)
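# Usage sketch (assumption, not part of the original script): the loaders above
# are assumed to yield standard (images, labels) batches, so a quick sanity
# check over a single batch could look like this.
for images, labels in train_loader:
    print("[INFO] Train batch:", tuple(images.shape), "labels:", tuple(labels.shape))
    break  # only inspect the first batch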

if (args.backbone == "conv4"):
    from backbones.conv4 import Conv4
    feature_extractor = Conv4(flatten=True)
elif (args.backbone == "resnet8"):
    from backbones.resnet_small import ResNet, BasicBlock
    # (assumed) the instantiation was clipped from the snippet; three
    # single-block stages are used here to form a ResNet-8
    feature_extractor = ResNet(BasicBlock, [1, 1, 1], channels=[16, 32, 64], flatten=True)
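
The finetune flag checked at the top of this snippet only prints a message here; the sketch below illustrates, under assumptions not taken from the original script (the linear head, the feature_size attribute, and the optimizer settings are all hypothetical), how linear evaluation versus finetuning of the backbone could be wired.

import torch.nn as nn
import torch.optim as optim

# (assumed) a plain linear classifier on top of the backbone features;
# feature_extractor.feature_size is an assumed attribute name
linear_head = nn.Linear(feature_extractor.feature_size, num_classes)

if args.finetune in ("True", "true"):
    # finetuning: update both the backbone and the linear head
    params = list(feature_extractor.parameters()) + list(linear_head.parameters())
else:
    # linear evaluation: freeze the backbone and train only the head
    for p in feature_extractor.parameters():
        p.requires_grad = False
    params = list(linear_head.parameters())

optimizer = optim.SGD(params, lr=0.1, momentum=0.9)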
Example #2
print("[INFO] Device type: " + str(device))

# The DataManager is a separate class that returns the
# appropriate data loaders and dataset information (e.g. the
# number of classes) for the selected method and dataset.
from datamanager import DataManager

manager = DataManager(args.seed)
num_classes = manager.get_num_classes(args.dataset)

if (args.method == "relationnet"):
    from methods.relationnet import Model
    model = Model(feature_extractor, device, aggregation=args.aggregation)
    print("[INFO][RelationNet] TOT augmentations (K): " + str(args.K))
    print("[INFO][RelationNet] Aggregation function: " + str(args.aggregation))
    train_transform = manager.get_train_transforms(args.method, args.dataset)
    train_loader, _ = manager.get_train_loader(dataset=args.dataset,
                                               data_type="multi",
                                               data_size=args.data_size,
                                               train_transform=train_transform,
                                               repeat_augmentations=args.K,
                                               num_workers=args.num_workers,
                                               drop_last=False)
elif (args.method == "standard"):
    from methods.standard import StandardModel
    model = StandardModel(feature_extractor,
                          num_classes,
                          tot_epochs=args.epochs)
    if (args.dataset == "stl10"):
        train_transform = manager.get_train_transforms("finetune",
                                                       args.dataset)
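
Both snippets read their configuration from an args namespace built elsewhere with argparse. The sketch below only reconstructs that parser from the attributes the snippets actually use (args.seed, args.dataset, args.backbone, args.method, args.finetune, args.aggregation, args.K, args.data_size, args.num_workers, args.epochs); every type, default value, and flag spelling is an assumption.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--seed", type=int, default=-1)             # args.seed (negative = no seeding)
parser.add_argument("--dataset", type=str, default="cifar10")   # args.dataset
parser.add_argument("--backbone", type=str, default="conv4")    # args.backbone
parser.add_argument("--method", type=str, default="standard")   # args.method
parser.add_argument("--finetune", type=str, default="False")    # args.finetune ("True"/"true" enables it)
parser.add_argument("--aggregation", type=str, default="cat")   # args.aggregation (RelationNet only)
parser.add_argument("--K", type=int, default=2)                 # args.K, repeated augmentations
parser.add_argument("--data_size", type=int, default=128)       # args.data_size
parser.add_argument("--num_workers", type=int, default=4)       # args.num_workers
parser.add_argument("--epochs", type=int, default=100)          # args.epochs
args = parser.parse_args()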