Example #1
    def __init__(self, args):
        self.args = args
        # image transform
        input_transform = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize([.485, .456, .406], [.229, .224, .225]),
        ])
        # dataset and dataloader
        data_kwargs = {'transform': input_transform, 'base_size': args.base_size,
                       'crop_size': args.crop_size}
        trainset = get_segmentation_dataset(
            args.dataset, split=args.train_split, mode='train', **data_kwargs)
        valset = get_segmentation_dataset(
            args.dataset, split='val', mode='val', **data_kwargs)
        self.train_data = gluon.data.DataLoader(
            trainset, args.batch_size, shuffle=True, last_batch='rollover',
            num_workers=args.workers)
        self.eval_data = gluon.data.DataLoader(valset, args.test_batch_size,
            last_batch='rollover', num_workers=args.workers)
        # create network
        if args.model_zoo is not None:
            model = get_model(args.model_zoo, pretrained=True)
        else:
            model = get_segmentation_model(model=args.model, dataset=args.dataset,
                                           backbone=args.backbone, norm_layer=args.norm_layer,
                                           norm_kwargs=args.norm_kwargs, aux=args.aux,
                                           crop_size=args.crop_size)
        model.cast(args.dtype)
        print(model)
        self.net = DataParallelModel(model, args.ctx, args.syncbn)
        self.evaluator = DataParallelModel(SegEvalModel(model), args.ctx)
        # resume checkpoint if needed
        if args.resume is not None:
            if os.path.isfile(args.resume):
                model.load_parameters(args.resume, ctx=args.ctx)
            else:
                raise RuntimeError("=> no checkpoint found at '{}'" \
                    .format(args.resume))
        # create criterion
        criterion = MixSoftmaxCrossEntropyLoss(args.aux, aux_weight=args.aux_weight)
        self.criterion = DataParallelCriterion(criterion, args.ctx, args.syncbn)
        # optimizer and lr scheduling
        self.lr_scheduler = LRScheduler(mode='poly', baselr=args.lr,
                                        niters=len(self.train_data), 
                                        nepochs=args.epochs)
        kv = mx.kv.create(args.kvstore)
        optimizer_params = {'lr_scheduler': self.lr_scheduler,
                            'wd': args.weight_decay,
                            'momentum': args.momentum}
        if args.dtype == 'float16':
            optimizer_params['multi_precision'] = True

        if args.no_wd:
            for k, v in self.net.module.collect_params('.*beta|.*gamma|.*bias').items():
                v.wd_mult = 0.0

        self.optimizer = gluon.Trainer(self.net.module.collect_params(), 'sgd',
                                       optimizer_params, kvstore=kv)
        # evaluation metrics
        self.metric = gluoncv.utils.metrics.SegmentationMetric(trainset.num_class)
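A minimal sketch of the training step that would consume the objects built above, in the spirit of the GluonCV segmentation training script; the training method name and the autograd usage are assumptions here, not part of the original snippet.

    # sketch; assumes `from mxnet import autograd` at module level
    def training(self, epoch):
        tbar = tqdm(self.train_data)
        train_loss = 0.0
        for i, (data, target) in enumerate(tbar):
            self.lr_scheduler.update(i, epoch)      # poly decay, per iteration
            with autograd.record(True):
                outputs = self.net(data.astype(self.args.dtype, copy=False))
                losses = self.criterion(outputs, target)
                mx.nd.waitall()
                autograd.backward(losses)
            self.optimizer.step(self.args.batch_size)
            train_loss += sum(l.mean().asscalar() for l in losses) / len(losses)
            tbar.set_description('Epoch %d, training loss %.3f'
                                 % (epoch, train_loss / (i + 1)))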
Example #2
def test(args):
    # output folder
    outdir = 'outdir'
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    # image transform
    input_transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize([.485, .456, .406], [.229, .224, .225]),
    ])
    # dataset and dataloader
    if args.eval:
        testset = get_segmentation_dataset(
            args.dataset, split='val', mode='testval', transform=input_transform, root='/mnt/mdisk/xcq/VOCdevkit/')
        total_inter, total_union, total_correct, total_label = \
            np.int64(0), np.int64(0), np.int64(0), np.int64(0)
    else:
        testset = get_segmentation_dataset(
            args.dataset, split='test', mode='test', transform=input_transform, root='/mnt/mdisk/xcq/VOCdevkit/')
    test_data = gluon.data.DataLoader(
        testset, args.test_batch_size, shuffle=False, last_batch='keep',
        batchify_fn=ms_batchify_fn, num_workers=args.workers)
    # create network
    if args.model_zoo is not None:
        model = get_model(args.model_zoo, pretrained=True)
    else:
        model = get_segmentation_model(model=args.model, dataset=args.dataset, ctx=args.ctx,
                                       backbone=args.backbone, norm_layer=args.norm_layer,
                                       norm_kwargs=args.norm_kwargs, aux=args.aux,
                                       base_size=args.base_size, crop_size=args.crop_size)
        # load pretrained weight
        assert args.resume is not None, '=> Please provide the checkpoint using --resume'
        if os.path.isfile(args.resume):
            model.load_parameters(args.resume, ctx=args.ctx)
        else:
            raise RuntimeError("=> no checkpoint found at '{}'" \
                .format(args.resume))
    print(model)
    evaluator = MultiEvalModel(model, testset.num_class, ctx_list=args.ctx)
    metric = gluoncv.utils.metrics.SegmentationMetric(testset.num_class)

    tbar = tqdm(test_data)
    for i, (data, dsts) in enumerate(tbar):
        if args.eval:
            predicts = [pred[0] for pred in evaluator.parallel_forward(data)]
            targets = [target.as_in_context(predicts[0].context) \
                       for target in dsts]
            metric.update(targets, predicts)
            pixAcc, mIoU = metric.get()
            tbar.set_description('pixAcc: %.4f, mIoU: %.4f' % (pixAcc, mIoU))
        else:
            im_paths = dsts
            predicts = evaluator.parallel_forward(data)
            for predict, impath in zip(predicts, im_paths):
                predict = mx.nd.squeeze(mx.nd.argmax(predict[0], 1)).asnumpy() + \
                    testset.pred_offset
                mask = get_color_pallete(predict, args.dataset)
                outname = os.path.splitext(impath)[0] + '.png'
                mask.save(os.path.join(outdir, outname))
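The loader above passes batchify_fn=ms_batchify_fn because test images keep their native sizes and cannot be stacked into one tensor. A sketch of what such a batchify function does, modeled on the helper in GluonCV's segmentation data module (treat the exact definition as an assumption):

import mxnet as mx

def ms_batchify_fn(data):
    # group variable-sized samples into lists instead of stacked NDArrays
    if isinstance(data[0], (str, mx.nd.NDArray)):
        return list(data)
    elif isinstance(data[0], tuple):
        data = zip(*data)
        return [ms_batchify_fn(i) for i in data]
    raise RuntimeError('unknown datatype')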
Example #3
def test(args):
    # output folder
    means = nd.array([123, 117, 104])
    std = nd.array([58.395, 57.12, 57.375])
    outdir = 'outdir'
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    # image transform for the dataset (the single probe image below is
    # normalized by hand with the raw-pixel statistics above)
    input_transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize([.485, .456, .406], [.229, .224, .225]),
    ])
    # dataset and dataloader
    if args.eval:
        img = image.imread('./0000000152.png')
        img = img.astype('float32')
        img = (img - means) / std
        img = nd.transpose(img, (2, 0, 1))
        img = nd.expand_dims(img, axis=0)
        testset = get_segmentation_dataset(
            args.dataset, split='val', mode='testval', transform=input_transform, root='/mnt/mdisk/xcq/VOCdevkit/')
        total_inter, total_union, total_correct, total_label = \
            np.int64(0), np.int64(0), np.int64(0), np.int64(0)
    else:
        testset = get_segmentation_dataset(
            args.dataset, split='test', mode='test', transform=input_transform, root='/mnt/mdisk/xcq/VOCdevkit/')
    if args.model_zoo is not None:
        model = get_model(args.model_zoo, pretrained=True)
    else:
        model = get_segmentation_model(model=args.model, dataset=args.dataset, ctx=args.ctx,
                                       backbone=args.backbone, norm_layer=args.norm_layer,
                                       norm_kwargs=args.norm_kwargs, aux=args.aux,
                                       base_size=args.base_size, crop_size=args.crop_size)
        # load pretrained weight
        assert args.resume is not None, '=> Please provide the checkpoint using --resume'
        if os.path.isfile(args.resume):
            model.load_parameters(args.resume, ctx=args.ctx)
        else:
            raise RuntimeError("=> no checkpoint found at '{}'" \
                .format(args.resume))
    print(model)
    if args.eval:
        # single-image sanity check; img lives on CPU, the model on args.ctx
        predicts = model(img.as_in_context(args.ctx[0]))
    evaluator = MultiEvalModel(model, testset.num_class, ctx_list=args.ctx)
    metric = gluoncv.utils.metrics.SegmentationMetric(testset.num_class)

    test_data = gluon.data.DataLoader(
        testset, args.test_batch_size, shuffle=False, last_batch='keep',
        batchify_fn=ms_batchify_fn, num_workers=args.workers)
    tbar = tqdm(test_data)
    for i, (data, dsts) in enumerate(tbar):
        if args.eval:
            predicts = [pred[0] for pred in evaluator.parallel_forward(data)]
            targets = [target.as_in_context(predicts[0].context) \
                       for target in dsts]
            metric.update(targets, predicts)
            pixAcc, mIoU = metric.get()
            tbar.set_description('pixAcc: %.4f, mIoU: %.4f' % (pixAcc, mIoU))
        else:
            im_paths = dsts
            predicts = evaluator.parallel_forward(data)
            for predict, impath in zip(predicts, im_paths):
                predict = mx.nd.squeeze(mx.nd.argmax(predict[0], 1)).asnumpy() + \
                    testset.pred_offset
                mask = get_color_pallete(predict, args.dataset)
                outname = os.path.splitext(impath)[0] + '.png'
                mask.save(os.path.join(outdir, outname))
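Note that the two normalization styles in this example are the same statistics on different scales: the raw-pixel means and stds are just the ImageNet constants multiplied by 255. A quick check:

import numpy as np

imagenet_mean = np.array([.485, .456, .406])
imagenet_std = np.array([.229, .224, .225])
print(imagenet_mean * 255)  # [123.675 116.28  103.53 ] ~= [123, 117, 104]
print(imagenet_std * 255)   # [ 58.395  57.12   57.375]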
Example #4
    def __init__(self, args):
        self.args = args
        # image transform
        input_transform = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize([.485, .456, .406], [.229, .224, .225]),
        ])
        # dataset and dataloader
        trainset = get_segmentation_dataset(args.dataset,
                                            split='train',
                                            transform=input_transform)
        valset = get_segmentation_dataset(args.dataset,
                                          split='val',
                                          transform=input_transform)
        self.train_data = gluon.data.DataLoader(trainset,
                                                args.batch_size,
                                                shuffle=True,
                                                last_batch='rollover',
                                                num_workers=args.workers)
        self.eval_data = gluon.data.DataLoader(valset,
                                               args.test_batch_size,
                                               last_batch='keep',
                                               num_workers=args.workers)
        # create network
        model = get_segmentation_model(model=args.model,
                                       dataset=args.dataset,
                                       backbone=args.backbone,
                                       norm_layer=args.norm_layer,
                                       aux=args.aux)
        print(model)
        self.net = DataParallelModel(model, args.ctx, args.syncbn)
        self.evaluator = DataParallelModel(SegEvalModel(model), args.ctx)
        # resume checkpoint if needed
        if args.resume is not None:
            if os.path.isfile(args.resume):
                model.load_parameters(args.resume, ctx=args.ctx)
            else:
                raise RuntimeError("=> no checkpoint found at '{}'" \
                    .format(args.resume))
        # create criterion
        criterion = SoftmaxCrossEntropyLossWithAux(args.aux)
        self.criterion = DataParallelCriterion(criterion, args.ctx,
                                               args.syncbn)
        # optimizer and lr scheduling
        self.lr_scheduler = LRScheduler(mode='poly',
                                        baselr=args.lr,
                                        niters=len(self.train_data),
                                        nepochs=args.epochs)
        kv = mx.kv.create(args.kvstore)
        self.optimizer = gluon.Trainer(self.net.module.collect_params(),
                                       'sgd', {
                                           'lr_scheduler': self.lr_scheduler,
                                           'wd': args.weight_decay,
                                           'momentum': args.momentum,
                                           'multi_precision': True
                                       },
                                       kvstore=kv)
Example #5
def get_dataset(dataset):
    if dataset.lower() == 'citys':
        train_dataset = get_segmentation_dataset(dataset+'_panoptic', split='train', mode='train')
        val_dataset = get_segmentation_dataset(dataset+'_panoptic', split='val', mode='val')
        val_metric = CitysPanopticMetric(val_dataset, "cityscapes_panoptic_val")
    elif dataset.lower() == 'coco':
        pass
    else:
        raise NotImplementedError('Dataset: {} not implemented.'.format(dataset))
    return train_dataset, val_dataset, val_metric
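A hypothetical call, assuming the Cityscapes panoptic splits are installed locally:

train_dataset, val_dataset, val_metric = get_dataset('citys')
print(len(train_dataset), len(val_dataset))  # split sizes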
Example #6
def test_quantization(model, args, input_transform):
    # output folder
    outdir = 'outdir_int8'
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    # hybridize
    model.hybridize(static_alloc=True, static_shape=True)

    # get dataset
    if args.eval:
        testset = get_segmentation_dataset(
            args.dataset, split='val', mode=args.mode, transform=input_transform)
    else:
        testset = get_segmentation_dataset(
            args.dataset, split='test', mode=args.mode, transform=input_transform)
    size = len(testset)
    batchify_fn = ms_batchify_fn if testset.mode == 'test' else None
    test_data = gluon.data.DataLoader(
            testset, args.batch_size, batchify_fn=batchify_fn, last_batch='keep',
            shuffle=False, num_workers=args.workers)
    print(model)
    metric = gluoncv.utils.metrics.SegmentationMetric(testset.num_class)

    tbar = tqdm(test_data)
    metric.reset()
    tic = time.time()
    for i, (batch, dsts) in enumerate(tbar):
        if args.eval:
            targets = mx.gluon.utils.split_and_load(dsts, ctx_list=args.ctx, even_split=False)
            data = mx.gluon.utils.split_and_load(batch, ctx_list=args.ctx, batch_axis=0, even_split=False)
            outputs = None
            for x in data:
                output = model(x)
                outputs = output if outputs is None else nd.concat(outputs, output, axis=0)
            metric.update(targets, outputs)
            pixAcc, mIoU = metric.get()
            tbar.set_description('pixAcc: %.4f, mIoU: %.4f' % (pixAcc, mIoU))
        else:
            for data, impath in zip(batch, dsts):
                data = data.as_in_context(args.ctx[0])
                if len(data.shape) < 4:
                    data = nd.expand_dims(data, axis=0)
                predict = model(data)[0]
                predict = mx.nd.squeeze(mx.nd.argmax(predict, 1)).asnumpy() + \
                    testset.pred_offset
                mask = get_color_pallete(predict, args.dataset)
                outname = os.path.splitext(impath)[0] + '.png'
                mask.save(os.path.join(outdir, outname))
    speed = size / (time.time() - tic)
    print('Inference speed with batchsize %d is %.2f img/sec' % (args.batch_size, speed))
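A possible driver for this function: pull an already-quantized model from the zoo and pass it in. The '_int8' model name below is an assumption about what the zoo provides, not taken from the snippet.

# hypothetical: load a pre-quantized segmentation model and evaluate it
model = get_model('fcn_resnet101_voc_int8', pretrained=True)
test_quantization(model, args, input_transform)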
Example #7
def test(model, args, input_transform):
    # DO NOT modify!!! Only supports batch_size = ngpus
    batch_size = args.ngpus
    # output folder
    outdir = 'outdir'
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    # get dataset
    if args.eval:
        testset = get_segmentation_dataset(args.dataset,
                                           split='val',
                                           mode='testval',
                                           transform=input_transform)
        total_inter, total_union, total_correct, total_label = \
            np.int64(0), np.int64(0), np.int64(0), np.int64(0)
    else:
        testset = get_segmentation_dataset(args.dataset,
                                           split='test',
                                           mode='test',
                                           transform=input_transform)
    test_data = gluon.data.DataLoader(testset,
                                      batch_size,
                                      shuffle=False,
                                      last_batch='keep',
                                      batchify_fn=ms_batchify_fn,
                                      num_workers=args.workers)
    print(model)
    evaluator = MultiEvalModel(model, testset.num_class, ctx_list=args.ctx)
    metric = gluoncv.utils.metrics.SegmentationMetric(testset.num_class)

    tbar = tqdm(test_data)
    for i, (data, dsts) in enumerate(tbar):
        if args.eval:
            predicts = [pred[0] for pred in evaluator.parallel_forward(data)]
            targets = [target.as_in_context(predicts[0].context) \
                       for target in dsts]
            metric.update(targets, predicts)
            pixAcc, mIoU = metric.get()
            tbar.set_description('pixAcc: %.4f, mIoU: %.4f' % (pixAcc, mIoU))
        else:
            im_paths = dsts
            predicts = evaluator.parallel_forward(data)
            for predict, impath in zip(predicts, im_paths):
                predict = mx.nd.squeeze(mx.nd.argmax(predict[0], 1)).asnumpy() + \
                    testset.pred_offset
                mask = get_color_pallete(predict, args.dataset)
                outname = os.path.splitext(impath)[0] + '.png'
                mask.save(os.path.join(outdir, outname))
Example #8
    def get_dataloader(self):
        args = self.args
        # image transform
        input_transform = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize([.485, .456, .406], [.229, .224, .225]),
        ])
        # dataset and dataloader: 'region' datasets take a single size
        if 'region' in args.dataset:
            data_kwargs = {
                'transform': input_transform,
                'size': args.crop_size
            }
        else:
            data_kwargs = {
                'transform': input_transform,
                'base_size': args.base_size,
                'crop_size': args.crop_size
            }

        trainset = get_segmentation_dataset(args.dataset,
                                            root=args.dataset_root,
                                            split='train',
                                            mode='train',
                                            **data_kwargs)
        self.train_data = gluon.data.DataLoader(trainset,
                                                args.batch_size,
                                                shuffle=True,
                                                last_batch='rollover',
                                                num_workers=args.workers)

        if not self.args.no_val:
            valset = get_segmentation_dataset(args.dataset,
                                              root=args.dataset_root,
                                              split='val',
                                              mode='val',
                                              **data_kwargs)
            self.eval_data = gluon.data.DataLoader(valset,
                                                   args.test_batch_size,
                                                   last_batch='rollover',
                                                   num_workers=args.workers)
Example #9
def get_custom_segm_dataset(mode, args):
    """ get custom segmentation dataset for training or testing
    Args:
        mode (str): "train" or "test"
        args: configs from argparse
    Returns:
        if mode is "train", return tuple(trainset, valset)
        if mode is "test", return testset
    """
    # image transform
    input_transform = get_input_transform()
    if mode == "train":
        data_kwargs = {'transform': input_transform, 'base_size': args.base_size,
                       'crop_size': args.crop_size}
        if args.dataset.lower() == 'imaterialist':
            alter_bg = False if args.alter_bg == 0 else args.alter_bg
            trainset = iMaterialistSegmentation(root='datasets/imaterialist', \
                            split=args.train_split, mode='train', alter_bg=alter_bg, **data_kwargs)
            valset = iMaterialistSegmentation(root='datasets/imaterialist', \
                            split='val', mode='val', **data_kwargs)
        else:
            trainset = get_segmentation_dataset(
                args.dataset, split=args.train_split, mode='train', **data_kwargs)
            valset = get_segmentation_dataset(
                args.dataset, split='val', mode='val', **data_kwargs)
        return trainset, valset
    elif mode == "test":
        if args.eval:
            split_name, mode_name = 'val', 'testval'
        else:
            # TODO: it seems mode='test' for dataset is not implemented
            split_name, mode_name = 'test', 'test'
        if args.dataset.lower() == 'imaterialist':
            testset = iMaterialistSegmentation(root='datasets/imaterialist', \
                            split=split_name, mode=mode_name, transform=input_transform, tta=args.tta)
        else:
            testset = get_segmentation_dataset(
                args.dataset, split=split_name, mode=mode_name, transform=input_transform)

        return testset
    else:
        raise NotImplementedError("mode = %s is not supported" % mode)
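Typical usage, with args coming from the same argparse config the docstring mentions:

trainset, valset = get_custom_segm_dataset("train", args)  # training pair
testset = get_custom_segm_dataset("test", args)            # evaluation set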
Example #10
def test(args):
    # output folder
    outdir = 'outdir'
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    # image transform
    input_transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize([.485, .456, .406], [.229, .224, .225]),
    ])
    # dataset and dataloader
    testset = get_segmentation_dataset(args.dataset,
                                       split='test',
                                       transform=input_transform)
    test_data = gluon.data.DataLoader(testset,
                                      args.test_batch_size,
                                      last_batch='keep',
                                      batchify_fn=test_batchify_fn,
                                      num_workers=args.workers)
    # create network
    model = get_segmentation_model(model=args.model,
                                   dataset=args.dataset,
                                   backbone=args.backbone,
                                   norm_layer=args.norm_layer)
    print(model)
    evaluator = MultiEvalModel(model, testset.num_class, ctx_list=args.ctx)
    # load pretrained weight
    assert args.resume is not None, '=> Please provide the checkpoint using --resume'
    if os.path.isfile(args.resume):
        model.load_parameters(args.resume, ctx=args.ctx)
    else:
        raise RuntimeError("=> no checkpoint found at '{}'" \
            .format(args.resume))

    tbar = tqdm(test_data)
    for i, (data, im_paths) in enumerate(tbar):
        predicts = evaluator.parallel_forward(data)
        for predict, impath in zip(predicts, im_paths):
            predict = mx.nd.squeeze(mx.nd.argmax(predict, 1)).asnumpy()
            mask = get_color_pallete(predict, args.dataset)
            outname = os.path.splitext(impath)[0] + '.png'
            mask.save(os.path.join(outdir, outname))
Example #11
def test(args):
    if not horse_change:
        # output folder
        # outdir = 'outdir'
        outdir = args.outdir
        if not os.path.exists(outdir):
            os.makedirs(outdir)
        # image transform
        input_transform = transforms.Compose([
            transforms.ToTensor(),
            # transforms.Normalize([.485, .456, .406], [.229, .224, .225]),
            # transforms.Normalize([0, 0, 0], [1, 1, 1]),
            # transforms.Normalize([0], [100]), # this is for 1 channel: ([0], [1]) ([556.703], [482.175])
        ])
        # dataset and dataloader
        if args.eval:
            testset = get_segmentation_dataset(args.dataset, 
                                               split='val', 
                                               mode='testval', 
                                               transform=input_transform)
            total_inter, total_union, total_correct, total_label = \
                np.int64(0), np.int64(0), np.int64(0), np.int64(0)
        else:
            testset = get_segmentation_dataset(args.dataset, 
                                               split='test', 
                                               mode='test', 
                                               transform=input_transform)
        test_data = gluon.data.DataLoader(testset, 
                                          args.test_batch_size, 
                                          shuffle=False, 
                                          last_batch='keep',
                                          batchify_fn=ms_batchify_fn, 
                                          num_workers=args.workers)
        # create network
        if args.model_zoo is not None:
            model = get_model(args.model_zoo, pretrained=True)
        else:
            model = get_segmentation_model(model=args.model, 
                                           dataset=args.dataset, 
                                           ctx=args.ctx,
                                           backbone=args.backbone, 
                                           norm_layer=args.norm_layer,
                                           norm_kwargs=args.norm_kwargs, 
                                           aux=args.aux,
                                           base_size=args.base_size, 
                                           crop_size=args.crop_size)
            # load pretrained weight
            assert args.resume is not None, '=> Please provide the checkpoint using --resume'
            if os.path.isfile(args.resume):
                model.load_parameters(args.resume, ctx=args.ctx)
            else:
                raise RuntimeError("=> no checkpoint found at '{}'" \
                    .format(args.resume))
        # print(model) # [horse]: do not print model
        evaluator = MultiEvalModel(model, testset.num_class, ctx_list=args.ctx)
        metric = gluoncv.utils.metrics.SegmentationMetric(testset.num_class)

        print('testset.pred_offset:', testset.pred_offset) # horse
        print('model.crop_size', model.crop_size) # horse

        tbar = tqdm(test_data)
        for i, (data, dsts) in enumerate(tbar):
            if args.eval:
                # print('data', data[0].shape) # horse
                predicts = [pred[0] for pred in evaluator.parallel_forward(data)]
                # print('predicts', predicts[0].shape)
                targets = [target.as_in_context(predicts[0].context) \
                           for target in dsts]
                print('targets', targets[0].shape)
                metric.update(targets, predicts)
                pixAcc, mIoU = metric.get()
                tbar.set_description('pixAcc: %.4f, mIoU: %.4f' % (pixAcc, mIoU))
            else:
                output_score_map = True # [horse added]
                if output_score_map:
                    # score_map_dir = 'scoredir'
                    score_map_dir = args.scoredir
                    if not os.path.exists(score_map_dir):
                        os.makedirs(score_map_dir)

                im_paths = dsts
                # print('data', data[0].shape) # horse
                predicts = evaluator.parallel_forward(data)
                # print(predicts[0].shape)
                for predict, impath in zip(predicts, im_paths):
                    # change from 1 to 0 [horse]
                    # print('predict:', predict[0].shape) # predict: (3, 127, 207)
                    if output_score_map:
                        score_map_name = os.path.splitext(impath)[0] + '.pkl'
                        score_map_path = os.path.join(score_map_dir, score_map_name)
                        with open(score_map_path, 'wb') as fo:
                            pickle.dump(predict[0].asnumpy()[0:3,:,:], fo)
                    predict = mx.nd.squeeze(mx.nd.argmax(predict[0], 0)).asnumpy() + \
                        testset.pred_offset
                    mask = get_color_pallete(predict, args.dataset)
                    outname = os.path.splitext(impath)[0] + '.png'
                    # print('predict:', predict.shape) # predict: (127, 207)
                    # print('mask:', mask) # it is a PIL.Image.Image
                    mask.save(os.path.join(outdir, outname))
                # break

    if horse_change: 
        # >>>>>>>>>> >>>>>>>>>> >>>>>>>>>> >>>>>>>>>> >>>>>>>>>> >>>>>>>>>>
        # output folder
        outdir = 'outdir'
        if not os.path.exists(outdir):
            os.makedirs(outdir)
        # image transform
        input_transform = transforms.Compose([
            transforms.ToTensor(),
            # transforms.Normalize([.485, .456, .406], [.229, .224, .225]),
            # transforms.Normalize([0, 0, 0], [1, 1, 1]),
            # transforms.Normalize([0], [100]), # this is for 1 channel: ([0], [1]) ([556.703], [482.175])
        ])
        # dataset and dataloader
        if args.eval:
            testset = get_segmentation_dataset(args.dataset, 
                                               split='val', 
                                               mode='testval', 
                                               transform=input_transform)
            total_inter, total_union, total_correct, total_label = \
                np.int64(0), np.int64(0), np.int64(0), np.int64(0)
        else:
            testset = get_segmentation_dataset(args.dataset, 
                                               split='test', 
                                               mode='test', 
                                               transform=input_transform)

        test_data = gluon.data.DataLoader(testset, 
                                          args.batch_size, # args.test_batch_size, [horse changed this]
                                          shuffle=False, 
                                          last_batch='keep',
                                          batchify_fn=ms_batchify_fn, 
                                          num_workers=args.workers)
        # create network
        if args.model_zoo is not None:
            model = get_model(args.model_zoo, pretrained=True)
        else:
            model = get_segmentation_model(model=args.model, 
                                           dataset=args.dataset, 
                                           ctx=args.ctx,
                                           backbone=args.backbone, 
                                           norm_layer=args.norm_layer,
                                           norm_kwargs=args.norm_kwargs, 
                                           aux=args.aux,
                                           base_size=args.base_size, 
                                           crop_size=args.crop_size)
            # load pretrained weight
            assert args.resume is not None, '=> Please provide the checkpoint using --resume'
            if os.path.isfile(args.resume):
                model.load_parameters(args.resume, ctx=args.ctx)
            else:
                raise RuntimeError("=> no checkpoint found at '{}'" \
                    .format(args.resume))
        # print(model) # [horse]: do not print model
        evaluator = MultiEvalModel(model, testset.num_class, ctx_list=args.ctx)
        metric = gluoncv.utils.metrics.SegmentationMetric(testset.num_class)

        print('testset.pred_offset:', testset.pred_offset) # horse
        print('model.crop_size', model.crop_size) # horse

        tbar = tqdm(test_data)
        for i, (data, dsts) in enumerate(tbar):
            if args.eval:
                # print('data', data[0].shape) # horse
                predicts = [pred[0] for pred in evaluator.parallel_forward(data)]
                # print('predicts', predicts[0].shape)
                targets = [target.as_in_context(predicts[0].context) \
                           for target in dsts]
                print('targets', targets[0].shape)
                metric.update(targets, predicts)
                pixAcc, mIoU = metric.get()
                tbar.set_description('pixAcc: %.4f, mIoU: %.4f' % (pixAcc, mIoU))
            else:
                output_score_map = True # [horse added]
                if output_score_map:
                    score_map_dir = 'scoredir'

                im_paths = dsts
                print('data', data[0].shape) # horse
                predicts = evaluator.parallel_forward(data)
                print(predicts[0].shape)
                for predict, impath in zip(predicts, im_paths):

                    predict = mx.nd.squeeze(mx.nd.argmax(predict[0], 0)).asnumpy() + \
                        testset.pred_offset
                    mask = get_color_pallete(predict, args.dataset)
                    outname = os.path.splitext(impath)[0] + '.png'

                    mask.save(os.path.join(outdir, outname))
Example #12
    if args.benchmark:
        print('------benchmarking on %s model------' % model_prefix)
        benchmarking(model, args)
        sys.exit()

    # image transform
    input_transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize([.485, .456, .406], [.229, .224, .225]),
    ])

    if args.calibration or '_int8' in model_prefix:
        # get dataset
        if args.eval:
            testset = get_segmentation_dataset(args.dataset,
                                               split='val',
                                               mode=args.mode,
                                               transform=input_transform)
        else:
            testset = get_segmentation_dataset(args.dataset,
                                               split='test',
                                               mode=args.mode,
                                               transform=input_transform)
        size = len(testset)
        batchify_fn = ms_batchify_fn if testset.mode == 'test' else None
        # get dataloader
        test_data = gluon.data.DataLoader(testset,
                                          args.batch_size,
                                          batchify_fn=batchify_fn,
                                          last_batch='rollover',
                                          shuffle=False,
                                          num_workers=args.workers)
Example #13
    def __init__(self, args, logger):
        self.args = args
        self.logger = logger

        # image transform
        input_transform = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize([.485, .456, .406], [.229, .224, .225]),
        ])

        # dataset and dataloader
        data_kwargs = {
            'transform': input_transform,
            'base_size': args.base_size,
            'crop_size': args.crop_size
        }
        trainset = get_segmentation_dataset(args.dataset,
                                            split=args.train_split,
                                            mode='train',
                                            **data_kwargs)
        valset = get_segmentation_dataset(args.dataset,
                                          split='val',
                                          mode='val',
                                          **data_kwargs)
        self.train_data = gluon.data.DataLoader(trainset,
                                                args.batch_size,
                                                shuffle=True,
                                                last_batch='rollover',
                                                num_workers=args.workers)
        self.eval_data = gluon.data.DataLoader(valset,
                                               args.test_batch_size,
                                               last_batch='rollover',
                                               num_workers=args.workers)

        # create network
        if args.model_zoo is not None:
            model = get_model(args.model_zoo,
                              norm_layer=args.norm_layer,
                              norm_kwargs=args.norm_kwargs,
                              aux=args.aux,
                              base_size=args.base_size,
                              crop_size=args.crop_size,
                              pretrained=args.pretrained)
        else:
            model = get_segmentation_model(model=args.model,
                                           dataset=args.dataset,
                                           backbone=args.backbone,
                                           norm_layer=args.norm_layer,
                                           norm_kwargs=args.norm_kwargs,
                                           aux=args.aux,
                                           base_size=args.base_size,
                                           crop_size=args.crop_size)
        # for ResNeSt use only: set the DropBlock drop probability to 0
        from gluoncv.nn.dropblock import set_drop_prob
        from functools import partial
        apply_drop_prob = partial(set_drop_prob, 0.0)
        model.apply(apply_drop_prob)

        model.cast(args.dtype)
        logger.info(model)

        self.net = DataParallelModel(model, args.ctx, args.syncbn)
        self.evaluator = DataParallelModel(SegEvalModel(model), args.ctx)
        # resume checkpoint if needed
        if args.resume is not None:
            if os.path.isfile(args.resume):
                model.load_parameters(args.resume, ctx=args.ctx)
            else:
                raise RuntimeError("=> no checkpoint found at '{}'".format(
                    args.resume))

        # create criterion
        if 'icnet' in args.model:
            criterion = ICNetLoss(crop_size=args.crop_size)
        else:
            criterion = MixSoftmaxCrossEntropyLoss(args.aux,
                                                   aux_weight=args.aux_weight)
        self.criterion = DataParallelCriterion(criterion, args.ctx,
                                               args.syncbn)

        # optimizer and lr scheduling
        self.lr_scheduler = LRSequential([
            LRScheduler('linear',
                        base_lr=0,
                        target_lr=args.lr,
                        nepochs=args.warmup_epochs,
                        iters_per_epoch=len(self.train_data)),
            LRScheduler(mode='poly',
                        base_lr=args.lr,
                        nepochs=args.epochs - args.warmup_epochs,
                        iters_per_epoch=len(self.train_data),
                        power=0.9)
        ])
        kv = mx.kv.create(args.kvstore)

        if args.optimizer == 'sgd':
            optimizer_params = {
                'lr_scheduler': self.lr_scheduler,
                'wd': args.weight_decay,
                'momentum': args.momentum,
                'learning_rate': args.lr
            }
        elif args.optimizer == 'adam':
            optimizer_params = {
                'lr_scheduler': self.lr_scheduler,
                'wd': args.weight_decay,
                'learning_rate': args.lr
            }
        else:
            raise ValueError('Unsupported optimizer {} used'.format(
                args.optimizer))

        if args.dtype == 'float16':
            optimizer_params['multi_precision'] = True

        if args.no_wd:
            for k, v in self.net.module.collect_params(
                    '.*beta|.*gamma|.*bias').items():
                v.wd_mult = 0.0

        self.optimizer = gluon.Trainer(self.net.module.collect_params(),
                                       args.optimizer,
                                       optimizer_params,
                                       kvstore=kv)
        # evaluation metrics
        self.metric = gluoncv.utils.metrics.SegmentationMetric(
            trainset.num_class)
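The warmup-plus-poly schedule above is easy to state in closed form; a reference sketch of the decay rule (not the gluoncv internals):

def poly_lr(base_lr, cur_iter, max_iter, power=0.9):
    # decays from base_lr to 0 over max_iter iterations
    return base_lr * (1.0 - float(cur_iter) / max_iter) ** power

# the first args.warmup_epochs ramp linearly from 0 to args.lr,
# then poly_lr takes over for the remaining epochs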
Example #14
def test(model, args, input_transform):
    # DO NOT modify!!! Only supports batch_size = ngpus
    batch_size = args.ngpus

    # output folder
    outdir = 'outdir'
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    # get dataset
    if args.eval:
        testset = get_segmentation_dataset(
            args.dataset, split='val', mode='testval', transform=input_transform)
    else:
        testset = get_segmentation_dataset(
            args.dataset, split='test', mode='test', transform=input_transform)

    if 'icnet' in args.model:
        test_data = gluon.data.DataLoader(
            testset, batch_size, shuffle=False, last_batch='rollover',
            num_workers=args.workers)
    else:
        test_data = gluon.data.DataLoader(
            testset, batch_size, shuffle=False, last_batch='rollover',
            batchify_fn=ms_batchify_fn, num_workers=args.workers)
    print(model)

    if 'icnet' in args.model:
        evaluator = DataParallelModel(SegEvalModel(model), ctx_list=args.ctx)
    else:
        evaluator = MultiEvalModel(model, testset.num_class, ctx_list=args.ctx)

    metric = gluoncv.utils.metrics.SegmentationMetric(testset.num_class)

    if 'icnet' in args.model:
        tbar = tqdm(test_data)
        t_gpu = 0
        num = 0
        for i, (data, dsts) in enumerate(tbar):
            tic = time.time()
            outputs = evaluator(data.astype('float32', copy=False))
            t_gpu += time.time() - tic
            num += 1

            outputs = [x[0] for x in outputs]
            targets = mx.gluon.utils.split_and_load(dsts, ctx_list=args.ctx, even_split=False)
            metric.update(targets, outputs)

            pixAcc, mIoU = metric.get()
            gpu_time = t_gpu / num
            tbar.set_description('pixAcc: %.4f, mIoU: %.4f, t_gpu: %.2fms' % (pixAcc, mIoU, gpu_time*1000))
    else:
        tbar = tqdm(test_data)
        for i, (data, dsts) in enumerate(tbar):
            if args.eval:
                predicts = [pred[0] for pred in evaluator.parallel_forward(data)]
                targets = [target.as_in_context(predicts[0].context) \
                           for target in dsts]
                metric.update(targets, predicts)
                pixAcc, mIoU = metric.get()
                tbar.set_description('pixAcc: %.4f, mIoU: %.4f' % (pixAcc, mIoU))
            else:
                im_paths = dsts
                predicts = evaluator.parallel_forward(data)
                for predict, impath in zip(predicts, im_paths):
                    predict = mx.nd.squeeze(mx.nd.argmax(predict[0], 1)).asnumpy() + \
                        testset.pred_offset
                    mask = get_color_pallete(predict, args.dataset)
                    outname = os.path.splitext(impath)[0] + '.png'
                    mask.save(os.path.join(outdir, outname))
Example #15
def test(args):
    # output folder
    outdir = 'outdir'
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    # image transform
    input_transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize([.485, .456, .406], [.229, .224, .225]),
    ])
    # dataset and dataloader
    if args.eval:
        testset = get_segmentation_dataset(args.dataset,
                                           split='val',
                                           mode='testval',
                                           transform=input_transform)
        total_inter, total_union, total_correct, total_label = \
            np.int64(0), np.int64(0), np.int64(0), np.int64(0)
    else:
        testset = get_segmentation_dataset(args.dataset,
                                           split='test',
                                           mode='test',
                                           transform=input_transform)
    test_data = gluon.data.DataLoader(testset,
                                      args.test_batch_size,
                                      last_batch='keep',
                                      batchify_fn=ms_batchify_fn,
                                      num_workers=args.workers)
    # create network
    if args.model_zoo is not None:
        model = get_model(args.model_zoo, pretrained=True)
    else:
        model = get_segmentation_model(model=args.model,
                                       dataset=args.dataset,
                                       ctx=args.ctx,
                                       backbone=args.backbone,
                                       norm_layer=args.norm_layer)
        # load pretrained weight
        assert args.resume is not None, '=> Please provide the checkpoint using --resume'
        if os.path.isfile(args.resume):
            model.load_parameters(args.resume, ctx=args.ctx)
        else:
            raise RuntimeError("=> no checkpoint found at '{}'" \
                .format(args.resume))
    print(model)
    evaluator = MultiEvalModel(model, testset.num_class, ctx_list=args.ctx)

    tbar = tqdm(test_data)
    for i, (data, dsts) in enumerate(tbar):
        if args.eval:
            targets = dsts
            predicts = evaluator.parallel_forward(data)
            for predict, target in zip(predicts, targets):
                target = target.as_in_context(predict[0].context)
                correct, labeled = batch_pix_accuracy(predict[0], target)
                inter, union = batch_intersection_union(
                    predict[0], target, testset.num_class)
                total_correct += correct.astype('int64')
                total_label += labeled.astype('int64')
                total_inter += inter.astype('int64')
                total_union += union.astype('int64')
            pixAcc = np.float64(1.0) * total_correct / (
                np.spacing(1, dtype=np.float64) + total_label)
            IoU = np.float64(1.0) * total_inter / (
                np.spacing(1, dtype=np.float64) + total_union)
            mIoU = IoU.mean()
            tbar.set_description('pixAcc: %.4f, mIoU: %.4f' % (pixAcc, mIoU))
        else:
            im_paths = dsts
            predicts = evaluator.parallel_forward(data)
            for predict, impath in zip(predicts, im_paths):
                predict = mx.nd.squeeze(mx.nd.argmax(predict[0], 1)).asnumpy()
                mask = get_color_pallete(predict, args.dataset)
                outname = os.path.splitext(impath)[0] + '.png'
                mask.save(os.path.join(outdir, outname))
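The accumulators in the eval branch implement the usual definitions: pixAcc = total_correct / total_label, and mIoU is the per-class inter / union averaged over classes. A tiny numeric check with made-up counts:

import numpy as np

inter = np.array([50, 30])
union = np.array([100, 60])
print((inter / union).mean())  # 0.5 -> mIoU for two classes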
Example #16
    def __init__(self, args):
        self.args = args

        self.two_model = False  # optionally maintain a second identical model
        self.semi = False

        # image transform
        input_transform = transforms.Compose([
            transforms.ToTensor(),
            # transforms.Normalize([.485, .456, .406], [.229, .224, .225]),
            # transforms.Normalize([0, 0, 0], [1, 1, 1]), # ([0, 0, 0], [1, 1, 1])
            # transforms.Normalize([0], [1]), # this is for 1 channel: ([0], [1]) ([556.703], [482.175])
        ])

        # dataset and dataloader
        data_kwargs = {
            'transform': input_transform,
            'base_size': args.base_size,
            'crop_size': args.crop_size
        }

        trainset = get_segmentation_dataset(args.dataset,
                                            split=args.train_split,
                                            mode='train',
                                            **data_kwargs)

        valset = get_segmentation_dataset(args.dataset,
                                          split='val',
                                          mode='val',
                                          **data_kwargs)

        self.train_data = gluon.data.DataLoader(trainset,
                                                args.batch_size,
                                                shuffle=True,
                                                last_batch='rollover',
                                                num_workers=args.workers)

        self.eval_data = gluon.data.DataLoader(
            valset,
            args.batch_size,  # args.test_batch_size, [horse changed this]
            last_batch='rollover',
            num_workers=args.workers)

        # create network
        if args.model_zoo is not None:
            print('get model from the zoo.')
            model = get_model(args.model_zoo, pretrained=True)
            if self.two_model:
                self.model2 = get_model(
                    args.model_zoo, pretrained=True)  ## 2nd identical model
        else:
            print('create model.')
            model = get_segmentation_model(model=args.model,
                                           dataset=args.dataset,
                                           backbone=args.backbone,
                                           norm_layer=args.norm_layer,
                                           norm_kwargs=args.norm_kwargs,
                                           aux=args.aux,
                                           crop_size=args.crop_size,
                                           pretrained=False)
            if self.two_model:
                self.model2 = get_segmentation_model(
                    model=args.model,
                    dataset=args.dataset,
                    backbone=args.backbone,
                    norm_layer=args.norm_layer,
                    norm_kwargs=args.norm_kwargs,
                    aux=args.aux,
                    crop_size=args.crop_size,
                    pretrained=False)

        model.cast(args.dtype)
        if self.two_model:
            self.model2.cast(args.dtype)
        # print(model) # don't print model
        # print(help(model.collect_params))
        # >>> Notice here <<<
        # model.initialize() # horse ref: https://discuss.mxnet.io/t/object-detection-transfer-learning/2477/2
        self.net = DataParallelModel(model, args.ctx, args.syncbn)
        self.evaluator = DataParallelModel(SegEvalModel(model), args.ctx)

        if self.two_model:
            self.evaluator2 = DataParallelModel(SegEvalModel(self.model2),
                                                args.ctx)

        # resume checkpoint if needed
        if args.resume is not None:
            if os.path.isfile(args.resume):
                if not horse_changed:
                    model.load_parameters(args.resume, ctx=args.ctx)
                if horse_changed:
                    model.load_parameters(args.resume,
                                          ctx=args.ctx,
                                          allow_missing=True,
                                          ignore_extra=True)
            else:
                raise RuntimeError("=> no checkpoint found at '{}'" \
                    .format(args.resume))

        # create criterion
        criterion = MixSoftmaxCrossEntropyLoss(args.aux,
                                               aux_weight=args.aux_weight)
        self.criterion = DataParallelCriterion(criterion, args.ctx,
                                               args.syncbn)

        # optimizer and lr scheduling
        self.lr_scheduler = LRScheduler(mode='poly',
                                        baselr=args.lr,
                                        niters=len(self.train_data),
                                        nepochs=args.epochs)

        kv = mx.kv.create(args.kvstore)
        optimizer_params = {
            'lr_scheduler': self.lr_scheduler,
            'wd': args.weight_decay,
            'momentum': args.momentum
        }

        if args.dtype == 'float16':
            optimizer_params['multi_precision'] = True

        if args.no_wd:
            for k, v in self.net.module.collect_params(
                    '.*beta|.*gamma|.*bias').items():
                v.wd_mult = 0.0

        self.optimizer = gluon.Trainer(self.net.module.collect_params(),
                                       'sgd',
                                       optimizer_params,
                                       kvstore=kv)
        # evaluation metrics
        self.metric = gluoncv.utils.metrics.SegmentationMetric(
            trainset.num_class)
Example #17
def get_mxnet_dataset(dataset_name, **kwargs):
    if dataset_name in ['ade20k', 'pascal_voc', 'pascal_aug']:
        return get_segmentation_dataset(name=dataset_name, **kwargs)
    else:
        return MxnetSegmentation(name=dataset_name, **kwargs)
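Hypothetical usage: built-in names dispatch to gluoncv, anything else falls through to the custom MxnetSegmentation wrapper (the kwargs below are assumptions):

valset = get_mxnet_dataset('ade20k', split='val', mode='val')
custom = get_mxnet_dataset('my_dataset', split='train', mode='train')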