# Extract foreground (alpha) masks from an image sequence using MOG
# background subtraction and write them as zero-padded PNG files.
parser.add_argument('-in_images', type=str, default='Contents/Images/', help='input images directory')
parser.add_argument('-alpha', type=str, default='Contents/Dataset/Alpha/', help='images path')
parser.add_argument('-fx', type=float, default=1, help='output size in x axis')
parser.add_argument('-fy', type=float, default=1, help='output size in y axis')
args = parser.parse_args()

# exist_ok=True replaces the racy exists()-then-makedirs() check.
os.makedirs(args.alpha, exist_ok=True)

# Load the source images and rescale them by the requested x/y factors.
images_loader = ImagesLoader(args.in_images)
images_loader = ImageResizer(images_loader, args.fx, args.fy)

# Each apply() call updates the background model and returns the
# foreground mask for that frame, so frame order matters here.
mog = cv.bgsegm.createBackgroundSubtractorMOG()

processed = 0
for index, image in enumerate(images_loader):
    mask = mog.apply(image)
    # Zero-padded 4-digit filenames keep lexical and numeric order aligned.
    outputname = os.path.join(args.alpha, f'{index:04d}.png')
    cv.imwrite(outputname, mask)
    processed += 1

print(f'Processed {processed} out of {len(images_loader)}')
default='Contents/Dataset/weights.npy', help='weights of each class') parser.add_argument('-fx', type=float, default=1, help='output size in x axis') parser.add_argument('-fy', type=float, default=1, help='output size in y axis') args = parser.parse_args() for directory in [args.masks, args.labels]: if not os.path.exists(directory): os.makedirs(directory) masks_loader = ImagesLoader(args.in_masks) masks_loader = ImageResizer(masks_loader, args.fx, args.fy) classes = getClasses(args.classes) batch_loader = DataLoader(masks_loader, args.batch, False, num_workers=4) weights = np.zeros(len(classes)) index = 0 for masks in batch_loader: masks = torch.ByteTensor(masks).to(args.device) matrices, _weights = images_to_matrices(masks, classes, args.device) masks = matrices_to_images(matrices, classes, args.device) weights += _weights for i in range(len(masks)): outputname = os.path.join(args.masks, str(index).rjust(4, '0') + '.npy') np.save(outputname, matrices[i].cpu().numpy()) outputname = os.path.join(args.labels, str(index).rjust(4, '0') + '.png')