Example #1
args = {  # leading entries truncated in the source snippet
    'poly_train': True
}

# Path.
check_mkdir(ckpt_path)
check_mkdir(os.path.join(ckpt_path, exp_name))
vis_path = os.path.join(ckpt_path, exp_name, 'log')
check_mkdir(vis_path)
log_path = os.path.join(ckpt_path, exp_name,
                        str(datetime.datetime.now()) + '.txt')
writer = SummaryWriter(log_dir=vis_path, comment=exp_name)
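# The SummaryWriter above can log training curves during the loop; a hedged
# illustration with hypothetical `loss`, `optimizer`, and `curr_iter` names
# (none of them appear in this excerpt):
#   writer.add_scalar('loss', loss.item(), curr_iter)
#   writer.add_scalar('lr', optimizer.param_groups[0]['lr'], curr_iter)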

# Transform Data.
joint_transform = joint_transforms.Compose([
    joint_transforms.RandomRotate(),
    joint_transforms.Resize((args['scale'], args['scale']))
])
val_joint_transform = joint_transforms.Compose(
    [joint_transforms.Resize((args['scale'], args['scale']))])
img_transform = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize([0.485, 0.456, 0.406],
                         [0.229, 0.224, 0.225])  # ImageNet mean/std; maybe can be optimized.
])
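# The constants above are the ImageNet statistics. One way to "optimize" them is
# to measure this dataset's own per-channel stats; a hedged sketch, assuming a
# hypothetical `stat_set` built with ToTensor() only (so Normalize is not
# applied during the scan):
#   stat_loader = DataLoader(stat_set, batch_size=64, num_workers=4)
#   mean, std = torch.zeros(3), torch.zeros(3)
#   for img, _ in stat_loader:
#       mean += img.mean(dim=(0, 2, 3))
#       std += img.std(dim=(0, 2, 3))
#   mean /= len(stat_loader); std /= len(stat_loader)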
target_transform = transforms.ToTensor()

# Prepare Data Set.
train_set = ImageFolder(msd_training_root, joint_transform, img_transform,
                        target_transform)
print("Train set: {}".format(train_set.__len__()))
train_loader = DataLoader(train_set,
                          batch_size=args['train_batch_size'],  # the call is truncated in the
                          shuffle=True, num_workers=8)          # source; these arguments are assumed
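'poly_train' in the args dict above usually switches on the polynomial
learning-rate schedule common in these segmentation training scripts. A minimal
sketch of that schedule, assuming hypothetical base_lr / curr_iter / max_iter
values that do not appear in the truncated snippet:

def poly_lr(base_lr, curr_iter, max_iter, power=0.9):
    # Decays the learning rate from base_lr toward 0 over max_iter iterations.
    return base_lr * (1 - float(curr_iter) / max_iter) ** power

# Applied once per iteration, e.g. (hypothetical 'lr' and 'iter_num' keys):
#   optimizer.param_groups[0]['lr'] = poly_lr(args['lr'], curr_iter, args['iter_num'])
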
Example #2
File: train.py  Project: xw-hu/FSDNet
args = {  # leading entries truncated in the source snippet
    'backbone': 'mobilenet',  # 'resnet', 'xception', 'drn', 'mobilenet'
    'out_stride': 16,  # 8 or 16
    'sync_bn': None,  # whether to use sync bn (default: auto)
    'freeze_bn': False,
    'pre_train': True
}
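# These keys mirror DeepLab-style model constructors; a hedged sketch of how
# they are typically consumed (hypothetical `DeepLab` class, not shown in this
# excerpt):
#   model = DeepLab(backbone=args['backbone'], output_stride=args['out_stride'],
#                   sync_bn=args['sync_bn'], freeze_bn=args['freeze_bn'])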

transform = transforms.Compose([
    transforms.ToTensor()
    #transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])

to_pil = transforms.ToPILImage()

joint_transform = joint_transforms.Compose([
    joint_transforms.Resize((args['img_size_h'], args['img_size_w'])),
    #joint_transforms.RandomCrop(args['crop_size']),
    joint_transforms.RandomHorizontallyFlip()
])

joint_transform_val = joint_transforms.Compose([
    joint_transforms.Resize((args['img_size_h'], args['img_size_w'])),
])

train_set = ImageFolder(train_cuhkshadow_path,
                        transform=transform,
                        target_transform=transform,
                        joint_transform=joint_transform,
                        is_train=True,
                        batch_size=args['train_batch_size'])
train_loader = DataLoader(train_set,
                          batch_size=args['train_batch_size'],  # the call is truncated in the
                          shuffle=True, num_workers=8)          # source; these arguments are assumed
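
to_pil is created near the top of Example #2 but unused in the visible portion;
in shadow-detection code it typically converts a predicted mask tensor back to
an image for saving. A hedged sketch with a hypothetical trained `net` and a
single normalized input `img` of shape (3, H, W):

import torch

with torch.no_grad():
    # Sigmoid maps the raw logits to [0, 1]; the result has shape (1, H, W).
    pred = torch.sigmoid(net(img.unsqueeze(0))).squeeze(0).cpu()
    to_pil(pred).save('pred_mask.png')  # ToPILImage scales floats to 8-bit 'L' mode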