Example 1
def main(args):
    # torch.cuda.set_device(args.gpu)

    # Network Builders
    builder = ModelBuilder()
    
    enc_out = torch.randn(1, 2048, 64, 64)
    net_encoder = builder.build_encoder(
        weights="baseline-resnet50dilated-ppm_deepsup/encoder_epoch_20.pth")
    gcu = GraphConv()  # earlier variant: [GCU(X=enc_out, V=2), GCU(X=enc_out, V=4), GCU(X=enc_out, V=8), GCU(X=enc_out, V=32)]

    crit = nn.NLLLoss(ignore_index=-1)

    segmentation_module = SegmentationModule(net_encoder, gcu, crit, tr=False)

    # print("Prinitng Params", gcu[1].parameters())
    for m in gcu.parameters():
        print("Hello",m.shape,m.name,m)
    print("dddddddddddddddd", len(list(gcu.parameters())))
    for m in gcu.modules():
        print("Prining", m.parameters())
    # Dataset and Loader
    if len(args.test_imgs) == 1 and os.path.isdir(args.test_imgs[0]):
        test_imgs = find_recursive(args.test_imgs[0])
    else:
        test_imgs = args.test_imgs

    list_test = [{'fpath_img': x} for x in test_imgs]
    dataset_test = TestDataset(list_test, args, max_sample=-1)
    loader_test = torchdata.DataLoader(
        dataset_test,
        batch_size=1,
        shuffle=False,
        collate_fn=user_scattered_collate,
        num_workers=5,
        drop_last=True)

    # Main loop
    test(segmentation_module, loader_test, args)

    print('Inference done!')
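Note: every example in this list calls find_recursive() but never defines it; the real helper ships with the semantic-segmentation-pytorch code base. A minimal sketch of what such a helper could look like, inferred only from how the examples call it (name, signature, and behaviour here are assumptions, not the project's implementation):

# Hypothetical sketch of find_recursive(), inferred from its call sites
# (find_recursive(dir), find_recursive(dir, '.jpg'), ext=['.png', '.jpg']).
import os

def find_recursive(root_dir, ext='.jpg'):
    # Accept either a single extension or a list/tuple of extensions.
    exts = tuple(ext) if isinstance(ext, (list, tuple)) else (ext,)
    files = []
    for dirpath, _, filenames in os.walk(root_dir):
        for fname in filenames:
            if fname.lower().endswith(exts):
                files.append(os.path.join(dirpath, fname))
    return sorted(files)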
Example 2
def main(args):
    torch.cuda.set_device(args.gpu)

    # Network Builders
    builder = ModelBuilder()
    net_encoder = builder.build_encoder(
        arch=args.arch_encoder,
        fc_dim=args.fc_dim,
        weights=args.weights_encoder)
    net_decoder = builder.build_decoder(
        arch=args.arch_decoder,
        fc_dim=args.fc_dim,
        num_class=args.num_class,
        weights=args.weights_decoder,
        use_softmax=True)

    crit = nn.NLLLoss(ignore_index=-1)

    segmentation_module = SegmentationModule(net_encoder, net_decoder, crit)

    # Dataset and Loader
    if len(args.test_imgs) == 1 and os.path.isdir(args.test_imgs[0]):
        test_imgs = find_recursive(args.test_imgs[0])
    else:
        test_imgs = args.test_imgs
    list_test = [{'fpath_img': x} for x in test_imgs]
    dataset_test = TestDataset(
        list_test, args, max_sample=args.num_val)
    loader_test = torchdata.DataLoader(
        dataset_test,
        batch_size=args.batch_size,
        shuffle=False,
        collate_fn=user_scattered_collate,
        num_workers=5,
        drop_last=True)

    segmentation_module.cuda()

    # Main loop
    test(segmentation_module, loader_test, args)

    print('Inference done!')
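Note: both loaders above pass collate_fn=user_scattered_collate together with a small batch size. In the reference repository this collate function essentially hands the list of samples straight through instead of stacking them into one tensor, which lets each test image keep its own size and set of scales. A minimal sketch under that assumption:

# Pass-through collate sketch (assumption: mirrors user_scattered_collate
# in the reference repo). The DataLoader yields the raw list of samples,
# so variable-sized image dicts never have to be stacked into a batch tensor.
def user_scattered_collate(batch):
    return batch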
Example 3
def run():
    assert LooseVersion(torch.__version__) >= LooseVersion('0.4.0'), \
        'PyTorch>=0.4.0 is required'

    cfg.merge_from_file("config/ade20k-resnet50dilated-ppm_deepsup.yaml")
    cfg.merge_from_list([
        'DIR', 'ade20k-resnet50dilated-ppm_deepsup', 'TEST.result', './',
        'TEST.checkpoint', 'epoch_20.pth'
    ])
    # cfg.freeze()

    logger = setup_logger(distributed_rank=0)  # TODO
    logger.info("Loaded configuration file {}".format(
        "config/ade20k-resnet50dilated-ppm_deepsup.yaml"))
    logger.info("Running with config:\n{}".format(cfg))

    cfg.MODEL.arch_encoder = cfg.MODEL.arch_encoder.lower()
    cfg.MODEL.arch_decoder = cfg.MODEL.arch_decoder.lower()

    # absolute paths of model weights
    cfg.MODEL.weights_encoder = os.path.join(cfg.DIR,
                                             'encoder_' + cfg.TEST.checkpoint)
    cfg.MODEL.weights_decoder = os.path.join(cfg.DIR,
                                             'decoder_' + cfg.TEST.checkpoint)

    assert os.path.exists(cfg.MODEL.weights_encoder) and \
        os.path.exists(cfg.MODEL.weights_decoder), "checkpoint does not exist!"
    imgs = "/content/semantic-segmentation-pytorch/ADE_val_00001519.jpg"
    # generate testing image list
    if os.path.isdir(imgs):
        imgs = find_recursive(imgs)
    else:
        imgs = [imgs]
    assert len(imgs), "imgs should be a path to image (.jpg) or directory."
    cfg.list_test = [{'fpath_img': x} for x in imgs]

    if not os.path.isdir(cfg.TEST.result):
        os.makedirs(cfg.TEST.result)

    return main_run(cfg, 0)
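Note: Example 3 configures the run by merging a YAML file and then a flat key/value list into cfg. Assuming cfg is a yacs CfgNode (which the merge_from_file/merge_from_list calls suggest), a self-contained sketch of that pattern looks like this; the node names are illustrative, not the project's full schema:

# Minimal yacs sketch of the merge pattern used in Example 3
# (node names are illustrative, not the project's full schema).
from yacs.config import CfgNode as CN

cfg = CN()
cfg.DIR = ''
cfg.TEST = CN()
cfg.TEST.result = ''
cfg.TEST.checkpoint = ''

# merge_from_list consumes alternating KEY, VALUE pairs.
cfg.merge_from_list([
    'DIR', 'ade20k-resnet50dilated-ppm_deepsup',
    'TEST.result', './',
    'TEST.checkpoint', 'epoch_20.pth',
])
print(cfg.TEST.checkpoint)  # -> epoch_20.pth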
Example 4
    # absolute paths of model weights
    cfg.MODEL.weights_encoder = os.path.join(cfg.DIR,
                                             'encoder_' + cfg.TEST.checkpoint)
    cfg.MODEL.weights_decoder = os.path.join(cfg.DIR,
                                             'decoder_' + cfg.TEST.checkpoint)

    assert os.path.exists(cfg.MODEL.weights_encoder) and \
        os.path.exists(cfg.MODEL.weights_decoder), "checkpoint does not exist!"

    # generate testing image list
    # print("IMAGES:" + args.imgs)
    # if os.path.isdir(args.imgs[0]):
    #     print("images[0]" + args.imgs)
    #     imgs = find_recursive(args.imgs)
    # else:
    #     imgs = [args.imgs]

    if os.path.isfile(args.imgs):
        imgs = [args.imgs]
    else:
        imgs = find_recursive(args.imgs)

    assert len(imgs), "imgs should be a path to image (.jpg) or directory."

    cfg.list_test = [{'fpath_img': x} for x in imgs]

    if not os.path.isdir(cfg.TEST.result):
        os.makedirs(cfg.TEST.result)

    main(cfg, args.gpu)
Example 5
                        help="base directory name")
    parser.add_argument("--segpath",
                        default="data/",
                        required=False,
                        type=str,
                        help="base directory name")
    parser.add_argument("--outfile",
                        default="output/imageList.txt",
                        type=str,
                        help="path to output file",
                        required=False)
    args = parser.parse_args()

    imgs = []
    if os.path.isdir(args.imgpath):
        imgs += find_recursive(args.imgpath, '.jpg')
        imgs += find_recursive(args.imgpath, '.png')
    else:
        print("Exception: imgpath {} is not a directory".format(args.imgpath))

    if not os.path.isdir(args.segpath):
        print("Exception: segpath {} is not a directory".format(args.segpath))

    print('{} images found in {}'.format(len(imgs), args.imgpath))
    imgList = []
    for img in imgs:
        imgSize = get_image_size(img)
        seg = img.replace('images', 'labels')
 parser.add_argument("--nproc",
                     required=False,
                     type=int,
                     help="Number of parralel processes",
                     default=mp.cpu_count())
 parser.add_argument("--chunk",
                     required=False,
                     type=int,
                     help="Chunk size for each worker thread",
                     default=mp.cpu_count())
 # Read args
 args = parser.parse_args()
 # Generate image list
 if os.path.isdir(args.input):
     print(args.input)
     imgs = find_recursive(args.input, ext='.png')
 else:
     imgs = [args.input]
 assert len(
     imgs
 ), "Exception: imgs should be a path to image (.png|jpg) or directory."
 # Create output directory
 if not os.path.isdir(args.output):
     print('Creating empty output directory {}'.format(args.output))
     os.makedirs(args.output)
 pool = mp.Pool(args.nproc)
 # Assign tasks to workers
 for _ in tqdm(pool.imap_unordered(smoothSegmentation,
                                   [(img) for img in imgs],
                                   chunksize=args.chunk),
               total=len(imgs),
Example 7
    # cfg.freeze()

    logger = setup_logger(distributed_rank=0)  # TODO
    logger.info("Loaded configuration file {}".format(args.cfg))
    logger.info("Running with config:\n{}".format(cfg))

    cfg.MODEL.arch_encoder = cfg.MODEL.arch_encoder.lower()
    cfg.MODEL.arch_decoder = cfg.MODEL.arch_decoder.lower()

    # absolute paths of model weights
    cfg.MODEL.weights_encoder = os.path.join(cfg.DIR,
                                             'encoder_' + cfg.TEST.checkpoint)
    cfg.MODEL.weights_decoder = os.path.join(cfg.DIR,
                                             'decoder_' + cfg.TEST.checkpoint)

    assert os.path.exists(cfg.MODEL.weights_encoder) and \
        os.path.exists(cfg.MODEL.weights_decoder), "checkpoint does not exist!"

    # generate testing image list
    if os.path.isdir(args.imgs):  # was args.imgs[0], but that's wrong
        imgs = find_recursive(args.imgs)  # was args.imgs[0], but that's wrong
    else:
        imgs = [args.imgs]
    assert len(imgs), "imgs should be a path to image (.jpg) or directory."
    cfg.list_test = [{'fpath_img': x} for x in imgs]

    if not os.path.isdir(cfg.TEST.result):
        os.makedirs(cfg.TEST.result)

    main(cfg, args.gpu)
Example 8
cur_cwd = os.getcwd()
# os.chdir(os.path.abspath(os.path.join(cur_cwd, 'project_improvements', 'doors_detection')))

# get all the files in the input folder

input_folder = os.path.join(cur_cwd, 'input')
output_folder = os.path.join(cur_cwd, 'output')
final_folder = os.path.join(output_folder, 'final')
debug_folder = os.path.join(output_folder, 'debug')
unsuccessful_folder = os.path.join(output_folder, 'unsuccessful')

for folder in [output_folder, final_folder, debug_folder, unsuccessful_folder]:
    if not os.path.exists(folder):
        os.mkdir(folder)

input_imgs = find_recursive(input_folder, ext=['.png', '.jpg'])

for input_img in input_imgs:
    print("Inferring for " + input_img)
    img_name = os.path.basename(input_img)[:-4]  # remove the extension
    debug_folder = os.path.join(output_folder, 'debug', img_name)
    segmented_img = get_segmentation(input_img,
                                     debug=True,
                                     debug_folder=debug_folder)
    if segmented_img is not None:
        cv2.imwrite(os.path.join(final_folder, img_name + '.jpg'),
                    segmented_img)
    else:
        copyfile(
            input_img,
            os.path.join(unsuccessful_folder, os.path.basename(input_img)))
Example 9
    elif args.dataset == 'ADE20K':
        nameMappingFile = Path('data/ADEMap.json')
    elif args.dataset == 'PASCAL':
        nameMappingFile = Path('data/PASCALMap.json')
    else:
        print('Exception: Dataset type {} unknown'.format(args.dataset))
    with open(nameMappingFile) as mfile:
        mapNames = json.load(mfile)
    # Generate image list
    if os.path.isdir(args.input):
        print(args.input)
        imgs = find_recursive(args.input, ext='.mat')
    else:
        imgs = [args.input]
    assert len(imgs), "Exception: imgs should be a path to image (.jpg) or directory."
    # Create output directory
    if not os.path.isdir(args.output):
        print('Creating empty output directory: {}'.format(args.output))
        os.makedirs(args.output)
    # Create worker pool
    pool = mp.Pool(args.nproc)
    # Assign tasks to workers
    for _ in tqdm(pool.imap_unordered(remapImageMat,
                                      [(img) for img in imgs],
                                      chunksize=args.chunk),
                  total=len(imgs), desc='Mapping images', ascii=True):
        pass
    # Close pool
    pool.close()
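Note: Examples 6 and 9 both fan the per-image work out over a multiprocessing pool and wrap the iterator in tqdm so the progress bar advances as results arrive. A stand-alone sketch of that pattern follows; process_image is a placeholder standing in for workers like remapImageMat or smoothSegmentation.

# Stand-alone sketch of the pool + tqdm pattern; process_image is a
# placeholder for the per-image workers used in the examples.
import multiprocessing as mp
from tqdm import tqdm

def process_image(path):
    # ... real per-image work would go here ...
    return path

if __name__ == '__main__':
    imgs = ['a.mat', 'b.mat', 'c.mat']
    with mp.Pool(mp.cpu_count()) as pool:
        # imap_unordered yields results as soon as any worker finishes,
        # which keeps the progress bar moving even if some images are slow.
        for _ in tqdm(pool.imap_unordered(process_image, imgs, chunksize=2),
                      total=len(imgs), desc='Mapping images', ascii=True):
            pass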
Example 10
def arg_from_ui(imgs,
                progress,
                gpu_flag=None,
                config_path=None,
                dir=None,
                checkpoint=None,
                result=None):
    assert LooseVersion(torch.__version__) >= LooseVersion('0.4.0'), \
        'PyTorch>=0.4.0 is required'
    # args = {'cfg': 'config/ade20k-resnet50dilated-ppm_deepsup.yaml', 'gpu': 0, 'opts': None, 'gpu_flag': False,
    #         'dir': 'ade20k-resnet50dilated-ppm_deepsup', 'result': 'segmentation', 'checkpoint': 'epoch_20.pth'}
    parser = argparse.ArgumentParser(
        description="PyTorch Semantic Segmentation Testing")
    parser.add_argument("--imgs",
                        default=imgs,
                        type=str,
                        help="an image paths, or a directory name")
    parser.add_argument(
        "--config_path",
        default="config/ade20k-resnet50dilated-ppm_deepsup.yaml",
        metavar="FILE",
        help="path to config file",
        type=str,
    )
    parser.add_argument("--gpu",
                        default=0,
                        type=int,
                        help="gpu id for evaluation")
    parser.add_argument(
        "opts",
        help="Modify config options using the command-line",
        default=None,
        nargs=argparse.REMAINDER,
    )
    parser.add_argument(
        "--gpu_flag",
        help="open and close gpu",
        default=True,
        nargs=argparse.REMAINDER,
    )
    parser.add_argument(
        "--dir",
        help="model dir",
        default="ade20k-resnet50dilated-ppm_deepsup",
        nargs=argparse.REMAINDER,
    )
    parser.add_argument(
        "--result",
        help="segmentation result dir",
        default="segmentation",
        nargs=argparse.REMAINDER,
    )
    parser.add_argument(
        "--checkpoint",
        help="pretrained model checkpoint",
        default="epoch_20.pth",
        nargs=argparse.REMAINDER,
    )
    args = parser.parse_args()
    if gpu_flag is not None:
        args.gpu_flag = gpu_flag
    if config_path:
        args.config_path = config_path
    if dir:
        args.dir = dir
    if checkpoint:
        args.checkpoint = checkpoint
    if result:
        args.result = result

    cfg.merge_from_file(args.config_path)
    cfg.merge_from_list(args.opts)
    # cfg.freeze()

    logger = setup_logger(distributed_rank=0)  # TODO
    logger.info("Loaded configuration file {}".format(args.config_path))
    logger.info("Running with config:\n{}".format(cfg))

    cfg.MODEL.arch_encoder = cfg.MODEL.arch_encoder.lower()
    cfg.MODEL.arch_decoder = cfg.MODEL.arch_decoder.lower()

    # absolute paths of model weights
    cfg.MODEL.weights_encoder = os.path.join(args.dir,
                                             'encoder_' + args.checkpoint)
    cfg.MODEL.weights_decoder = os.path.join(args.dir,
                                             'decoder_' + args.checkpoint)
    print(cfg.MODEL.weights_encoder)

    assert os.path.exists(cfg.MODEL.weights_encoder) and \
           os.path.exists(cfg.MODEL.weights_decoder), "checkpoint does not exist!"

    # generate testing image list
    print('-----imgs:', args.imgs)
    if os.path.isdir(args.imgs):
        imgs = find_recursive(args.imgs)
    else:
        imgs = [args.imgs]
    assert len(imgs), "imgs should be a path to image (.jpg) or directory."
    cfg.list_test = [{'fpath_img': x} for x in imgs]

    if not os.path.isdir(args.result):
        os.makedirs(args.result)

    main(cfg, args.gpu, args, progress)
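Note: Example 10 builds an argparse parser inside a function that a UI calls directly, then overwrites selected attributes from the function's keyword arguments. One caveat of that approach is that parse_args() still reads the calling process's sys.argv. A small alternative sketch (not the project's code) sidesteps that by parsing an empty list and then applying the UI-supplied overrides explicitly:

# Alternative sketch (not the project's code): keep argparse defaults but
# never touch sys.argv, then apply the UI-supplied overrides explicitly.
import argparse

def build_args(imgs, checkpoint=None, result=None):
    parser = argparse.ArgumentParser(description="Segmentation testing")
    parser.add_argument("--imgs", default=imgs, type=str)
    parser.add_argument("--checkpoint", default="epoch_20.pth", type=str)
    parser.add_argument("--result", default="segmentation", type=str)
    args = parser.parse_args([])  # [] -> use defaults, ignore sys.argv
    if checkpoint:
        args.checkpoint = checkpoint
    if result:
        args.result = result
    return args

print(build_args("demo.jpg", checkpoint="epoch_30.pth"))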