Ejemplo n.º 1
0
        # NOTE(review): fragment of a larger training routine — `net`, `device`,
        # `args`, `batchsize`, `lr`, `epochs`, `Plain_Dataset`, and `Train` are
        # all defined outside this view.
        net.to(device)  # move model parameters to the target device (CPU or GPU)
        print("Model archticture: ", net)
        # CSV label files and per-split image directories under the --data root.
        traincsv_file = args.data + '/' + 'train.csv'
        validationcsv_file = args.data + '/' + 'val.csv'
        train_img_dir = args.data + '/' + 'train/'
        validation_img_dir = args.data + '/' + 'val/'

        # Shared preprocessing for both splits: convert PIL image / ndarray to a
        # tensor, then normalize with mean 0.5, std 0.5 (maps [0, 1] -> [-1, 1]).
        transformation = transforms.Compose(
            [transforms.ToTensor(),
             transforms.Normalize((0.5, ), (0.5, ))])
        train_dataset = Plain_Dataset(csv_file=traincsv_file,
                                      img_dir=train_img_dir,
                                      datatype='train',
                                      transform=transformation)
        validation_dataset = Plain_Dataset(csv_file=validationcsv_file,
                                           img_dir=validation_img_dir,
                                           datatype='val',
                                           transform=transformation)
        # num_workers=0 loads batches in the main process (no worker subprocesses).
        train_loader = DataLoader(train_dataset,
                                  batch_size=batchsize,
                                  shuffle=True,
                                  num_workers=0)
        # NOTE(review): shuffle=True on the validation loader is unusual — it does
        # not affect metrics but is typically False; confirm intent with the author.
        val_loader = DataLoader(validation_dataset,
                                batch_size=batchsize,
                                shuffle=True,
                                num_workers=0)

        # Multi-class classification loss; Adam with externally supplied lr.
        criterion = nn.CrossEntropyLoss()
        optmizer = optim.Adam(net.parameters(), lr=lr)
        Train(epochs, train_loader, val_loader, criterion, optmizer, device)
Ejemplo n.º 2
0
    if args.train:
        # Build the model and move its parameters to the target device.
        net = Deep_Emotion()
        net.to(device)
        print("Model architecture: ", net)  # fixed typo: was "archticture"

        # CSV label files and per-split image directories under the --data root.
        traincsv_file = args.data + '/' + 'train.csv'
        validationcsv_file = args.data + '/' + 'val.csv'
        train_img_dir = args.data + '/' + 'train/'
        validation_img_dir = args.data + '/' + 'val/'

        # Shared preprocessing for both splits: to-tensor, then normalize with
        # mean 0.5, std 0.5 (maps [0, 1] -> [-1, 1]).
        transformation2 = tfs.Compose(
            [tfs.ToTensor(), tfs.Normalize((0.5, ), (0.5, ))])
        train_dataset = Plain_Dataset(csv_file=traincsv_file,
                                      img_dir=train_img_dir,
                                      datatype='train',
                                      # BUG FIX: was `train_tf`, an undefined
                                      # name (NameError at runtime); the transform
                                      # built above is `transformation2`.
                                      transform=transformation2)
        validation_dataset = Plain_Dataset(csv_file=validationcsv_file,
                                           img_dir=validation_img_dir,
                                           datatype='val',
                                           transform=transformation2)
        # num_workers=0 loads batches in the main process (no worker subprocesses).
        train_loader = DataLoader(train_dataset,
                                  batch_size=batchsize,
                                  shuffle=True,
                                  num_workers=0)
        val_loader = DataLoader(validation_dataset,
                                batch_size=batchsize,
                                shuffle=True,
                                num_workers=0)

        # Multi-class classification loss; Adam with L2 regularization via
        # weight_decay. `lr`, `batchsize`, `epochs` come from the CLI args above.
        criterion = nn.CrossEntropyLoss()
        optimizer = optim.Adam(net.parameters(), lr=lr, weight_decay=0.0001)
        Train(epochs, train_loader, val_loader, criterion, optimizer, device)