Example #1 — compute the per-channel mean and standard deviation of the training set
0
import numpy as np

import torch
from torchvision import transforms
from DataLoader import DataLoader

# NOTE(review): transform_train is built but never passed to the dataset
# below — presumably A1() applies ToTensor itself; confirm against DataLoader.
transform_train = transforms.Compose([
    transforms.ToTensor()
])

# Load the entire training set as a single batch so the per-channel
# statistics can be computed in one pass.
trainset = DataLoader.A1().get_train_loader()
train_loader = torch.utils.data.DataLoader(trainset, batch_size=50_000, shuffle=True)

# Bug fix: Python 3 iterators expose __next__, not .next(); the original
# `train_loader.__iter__().next()` raises AttributeError. Use the built-in
# next()/iter() pair instead. [0] selects the image tensor from the
# (images, labels) batch.
train = next(iter(train_loader))[0]

# Reduce over (batch, height, width), leaving one value per channel (NCHW).
print('Mean: {}'.format(np.mean(train.numpy(), axis=(0, 2, 3))))
# Mean: [0.24853915 0.266838   0.2138273 ]
print('STD: {}'.format(np.std(train.numpy(), axis=(0, 2, 3))))
# STD: [0.16978161 0.16967748 0.13661802]
Example #2 — training setup: argument parsing, seeding, data loaders, and model initialisation
0
# Optimiser hyperparameters.
parser.add_argument(
    '--lr', type=float, default=5e-5, help='Learning rate')
parser.add_argument(
    '--momentum', type=float, default=0, help='Momentum')
parser.add_argument(
    '--weight-decay', type=float, default=2e-4, help='Weight decay')
parser.add_argument(
    '--batch-size', type=int, default=6, help='Batch size')

args = parser.parse_args()

# Seed both the stdlib and the torch RNGs for reproducibility.
# NOTE(review): `--seed` is presumably registered elsewhere in this file.
random.seed(args.seed)
torch.manual_seed(args.seed)

# Reference implementation:
# https://github.com/Kaixhin/FCN-semantic-segmentation

# Data: wrap the project-specific datasets in torch DataLoaders.
train_dataset = DataLoader.A1().get_train_loader()
val_dataset = DataLoader.A1().get_validation_loader()

# Settings shared by both loaders; only the training split is shuffled.
loader_kwargs = dict(batch_size=args.batch_size,
                     num_workers=args.workers,
                     pin_memory=True)
train_loader = torch.utils.data.DataLoader(train_dataset,
                                           shuffle=True,
                                           **loader_kwargs)
val_loader = torch.utils.data.DataLoader(val_dataset, **loader_kwargs)

# Training/Testing
# Initialise the backbone with ImageNet-pretrained resnet34 weights.
# Assumes FeatureResNet's state-dict keys line up with torchvision's
# resnet34 — TODO confirm when torchvision is upgraded.
# NOTE(review): `pretrained=True` is deprecated since torchvision 0.13 in
# favour of `weights=ResNet34_Weights.IMAGENET1K_V1`; kept as-is here to
# preserve behaviour on the pinned version.
pretrained_net = FeatureResNet()
pretrained_net.load_state_dict(models.resnet34(pretrained=True).state_dict())