# "Ejemplo n.º 1" — scraped page header, kept only as a comment so the file parses.
                    help='number of training images per object category')
## remaining command-line options (the parser itself is created earlier
## in the file, above this fragment)
parser.add_argument('--num_val', type=int, default=100,
                    help='number of validation images per object category')
parser.add_argument('--loaders', type=int, default=4,
                    help='number of parallel data loading processes')
parser.add_argument('--batch_size', type=int, default=32)

## parse argv into the namespace used by the rest of the script
args = parser.parse_args()

## hand the parsed options to the data pipeline before building datasets
pipeline.initialize(args)

## load model : image --> reflectance x normals x depth x lighting
## (moved to the GPU immediately after construction)
model = models.Decomposer().cuda()

## training data: samples drawn from the train splits, capped at
## --num_train images per dataset
train_set = pipeline.IntrinsicDataset(
    args.data_path, args.train_sets, args.intrinsics,
    array=args.array, size_per_dataset=args.num_train)

## shuffled loader with --loaders worker processes
train_loader = torch.utils.data.DataLoader(
    train_set, batch_size=args.batch_size,
    num_workers=args.loaders, shuffle=True)

## validation data: same construction as the training set, but drawn from
## the validation splits and sized by --num_val.
## NOTE(review): this span was a corrupted paste — the IntrinsicDataset call
## had argparse keyword lines (default=/help=) spliced into it, and the
## duplicated parser.add_argument('--num_val'/'--loaders'/'--batch_size')
## calls below it would raise argparse.ArgumentError (options already
## registered above). Reconstructed to match the intact copy of this call
## later in the file; args was already parsed once above, so the duplicate
## parse_args() is dropped.
val_set = pipeline.IntrinsicDataset(args.data_path,
                                    args.val_sets,
                                    args.intrinsics,
                                    array=args.array,
                                    size_per_dataset=args.num_val)

# NOTE(review): everything from here to the end of the fragment duplicates the
# setup earlier in this file (model construction, train dataset/loader, val
# dataset) — this looks like an accidental double paste of the same script;
# confirm which copy is canonical before relying on either.
print("inside ai function")
## load model : image --> reflectance x normals x depth x lighting
# NOTE(review): unlike the earlier copy, the model is NOT moved to the GPU
# here (no .cuda()) — confirm which device placement is intended.
model = models.Decomposer()
## training data: samples drawn from the train splits, capped at
## --num_train images per dataset
train_set = pipeline.IntrinsicDataset(args.data_path,
                                      args.train_sets,
                                      args.intrinsics,
                                      array=args.array,
                                      size_per_dataset=args.num_train)
## shuffled loader with --loaders worker processes
train_loader = torch.utils.data.DataLoader(train_set,
                                           batch_size=args.batch_size,
                                           num_workers=args.loaders,
                                           shuffle=True)

## validation data: same construction, drawn from the val splits and
## sized by --num_val
val_set = pipeline.IntrinsicDataset(args.data_path,
                                    args.val_sets,
                                    args.intrinsics,
                                    array=args.array,
                                    size_per_dataset=args.num_val)