def get_dataset(self):
    validation_threshold = self.validation_threshold
    self.train_dataset = gdata.RecordFileDetection(self.train_file)
    self.val_dataset = gdata.RecordFileDetection(self.val_file)
    # we only use the VOC07 mAP metric for evaluation
    self.val_metric = VOC07MApMetric(iou_thresh=validation_threshold,
                                     class_names=self.net.classes)
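For context on what RecordFileDetection yields, here is a minimal sketch, assuming a local val.rec (with its companion .idx) built by MXNet's im2rec tooling:

from gluoncv import data as gdata

dataset = gdata.RecordFileDetection('val.rec')  # hypothetical local file
image, label = dataset[0]   # image: HWC uint8 NDArray
print(image.shape)
print(label)                # rows of [xmin, ymin, xmax, ymax, class_id]

Each sample is an (image, label) pair, which is why the metric above can be fed directly from a loader built over this dataset.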
Example #2
def get_dataloader(net, data_shape, batch_size, num_workers, ctx):
    """Get dataloader."""
    import os

    # install gluoncv at runtime (a common SageMaker script-mode pattern)
    os.system('pip3 install gluoncv --pre')

    import mxnet as mx
    from mxnet import autograd, gluon
    from gluoncv import data as gdata
    from gluoncv.data.batchify import Tuple, Stack, Pad
    from gluoncv.data.transforms.presets.ssd import SSDDefaultTrainTransform

    width, height = data_shape, data_shape
    # use fake data to generate fixed anchors for target generation
    with autograd.train_mode():
        _, _, anchors = net(mx.nd.zeros((1, 3, height, width), ctx))
    anchors = anchors.as_in_context(mx.cpu())
    batchify_fn = Tuple(Stack(), Stack(),
                        Stack())  # stack image, cls_targets, box_targets
    train_dataset = gdata.RecordFileDetection(
        os.path.join(os.environ['SM_CHANNEL_TRAIN'], 'train.rec'))
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(SSDDefaultTrainTransform(width, height, anchors)),
        batch_size,
        shuffle=True,
        batchify_fn=batchify_fn,
        last_batch='rollover',
        num_workers=num_workers)
    return train_loader
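A minimal driver for this function; the SM_CHANNEL_TRAIN value and the model choice here are placeholders (SageMaker sets the variable itself, and the pointed-to folder must contain train.rec):

import os
import mxnet as mx
from gluoncv import model_zoo

os.environ.setdefault('SM_CHANNEL_TRAIN', '/opt/ml/input/data/train')
ctx = mx.cpu()
net = model_zoo.get_model('ssd_512_resnet50_v1_voc', pretrained=False)
net.initialize(ctx=ctx)
train_loader = get_dataloader(net, data_shape=512, batch_size=8,
                              num_workers=4, ctx=ctx)
# each batch follows the batchify_fn: image, cls_targets, box_targets
for images, cls_targets, box_targets in train_loader:
    print(images.shape, cls_targets.shape, box_targets.shape)
    break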
Example #3
def get_dataloader(net, data_shape, batch_size, num_workers, ctx):
    """Get dataloader."""

    width, height = data_shape, data_shape
    # use fake data to generate fixed anchors for target generation
    with autograd.train_mode():
        _, _, anchors = net(mx.nd.zeros((1, 3, height, width), ctx))
    anchors = anchors.as_in_context(mx.cpu())
    batchify_fn = Tuple(Stack(), Stack(),
                        Stack())  # stack image, cls_targets, box_targets

    # can I point that to a bundle of png files instead?
    train_dataset = gdata.RecordFileDetection(
        os.path.join(os.environ['SM_CHANNEL_TRAIN'], 'train.rec'))

    # this is the folder with all the training images
    # (train_folder is not used further in this snippet)
    train_folder = os.environ['SM_CHANNEL_TRAIN']

    train_loader = gluon.data.DataLoader(
        train_dataset.transform(SSDDefaultTrainTransform(width, height, anchors)),
        batch_size,
        shuffle=True,
        batchify_fn=batchify_fn,
        last_batch='rollover',
        num_workers=num_workers)
    return train_loader
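On the question in the comment above: RecordFileDetection needs a packed .rec file, not loose PNGs. Two options, sketched with hypothetical paths, are to pack the images with MXNet's im2rec tool, or to read a plain .lst index directly with GluonCV's LstDetection:

# Option 1: pack PNGs + a .lst index into a .rec file with MXNet's im2rec:
#   python im2rec.py train.lst /path/to/pngs --pass-through --pack-label
# then point RecordFileDetection at the resulting train.rec.

# Option 2: skip packing and read the .lst index over the image folder directly:
from gluoncv import data as gdata
train_dataset = gdata.LstDetection('train.lst', root='/path/to/pngs')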
Example #4
    def __init__(self,
                 model_path,
                 model='ssd300_vgg16_voc',
                 ctx='gpu',
                 threshold=0.5,
                 validation_threshold=0.5,
                 batch_size=4,
                 num_workers=2,
                 nms_threshold=0.5):
        self.model_path = model_path
        self.threshold = threshold
        self.validation_threshold = validation_threshold
        self.batch_size = batch_size
        self.num_workers = num_workers
        self.nms_threshold = nms_threshold

        self.classes = list(data_common['classes'])

        if ctx == 'cpu':
            self.ctx = [mx.cpu()]
        elif ctx == 'gpu':
            self.ctx = [mx.gpu(0)]
        else:
            raise ValueError('Invalid context.')

        self.width, self.height = dataset_commons.get_model_prop(model)
        self.model_name = model

        self.val_file = data_common['record_val_path']

        net = get_model(self.model_name, pretrained=False, ctx=self.ctx)
        # net.set_nms(nms_thresh=0.5, nms_topk=2)
        net.hybridize(static_alloc=True, static_shape=True)
        net.initialize(force_reinit=True, ctx=self.ctx)
        net.reset_class(classes=self.classes)
        net.load_parameters(self.model_path, ctx=self.ctx)
        self.net = net

        self.val_dataset = gdata.RecordFileDetection(self.val_file)
        self.val_metric = VOC07MApMetric(iou_thresh=validation_threshold,
                                         class_names=self.net.classes)

        # the actual validation loader
        val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
        val_loader = gluon.data.DataLoader(
            self.val_dataset.transform(
                SSDDefaultValTransform(self.width, self.height)),
            batch_size,
            shuffle=False,
            batchify_fn=val_batchify_fn,
            last_batch='keep',
            num_workers=self.num_workers)
        self.val_loader = val_loader
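The loader and metric built here are typically consumed by a validation loop; a sketch in the style of GluonCV's train_ssd.py validate(), assuming labels padded with -1 by the Pad batchify above:

import mxnet as mx
from mxnet import gluon

def validate(net, val_loader, ctx, val_metric):
    val_metric.reset()
    for batch in val_loader:
        data = gluon.utils.split_and_load(batch[0], ctx_list=ctx,
                                          batch_axis=0, even_split=False)
        label = gluon.utils.split_and_load(batch[1], ctx_list=ctx,
                                           batch_axis=0, even_split=False)
        for x, y in zip(data, label):
            ids, scores, bboxes = net(x)
            gt_bboxes = y.slice_axis(axis=-1, begin=0, end=4)
            gt_ids = y.slice_axis(axis=-1, begin=4, end=5)
            val_metric.update([bboxes], [ids], [scores],
                              [gt_bboxes], [gt_ids])
    return val_metric.get()  # metric names and values, incl. per-class AP and mAP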
Example #5
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        train_dataset = gdata.VOCDetection(root=args.data_path,
            splits=[(2007, 'trainval'), (2012, 'trainval')])
        val_dataset = gdata.VOCDetection(root=args.data_path,
            splits=[(2007, 'test')])
        val_metric = VOC07MApMetric(iou_thresh=0.5, class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        #train_dataset = gdata.COCODetection(splits='instances_train2014', use_crowd=False)
        train_dataset = gdata.COCODetection(root=args.data_path, splits='instances_train2017')
        val_dataset = gdata.COCODetection(root=args.data_path, splits='instances_val2017', skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset, args.save_prefix + '_eval', cleanup=True)
    elif dataset.lower() == 'rec':
        train_dataset = gdata.RecordFileDetection(os.path.join(args.data_path, 'pikachu_train.rec'))
        # NOTE: validation reuses the training record file here
        val_dataset = gdata.RecordFileDetection(os.path.join(args.data_path, 'pikachu_train.rec'))
        # rec_classes is expected to be defined at module level
        val_metric = VOC07MApMetric(iou_thresh=0.5, class_names=rec_classes)
    else:
        raise NotImplementedError('Dataset: {} not implemented.'.format(dataset))
    if args.mixup:
        from gluoncv.data.mixup import MixupDetection
        train_dataset = MixupDetection(train_dataset)
    return train_dataset, val_dataset, val_metric
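A hypothetical invocation of get_dataset; in the original script these fields come from argparse, and the path and prefix values below are placeholders:

from types import SimpleNamespace

args = SimpleNamespace(data_path='~/.mxnet/datasets/voc',
                       save_prefix='ssd_512_voc', mixup=False)
train_dataset, val_dataset, val_metric = get_dataset('voc', args)
print(len(train_dataset), len(val_dataset))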
Example #6
    def __init__(self,
                 model='ssd300',
                 ctx='gpu',
                 batch_size=4,
                 num_workers=2,
                 validation_threshold=0.5,
                 nms_threshold=0.5,
                 param_path=None):
        """
        Script responsible for training the class

        Arguments:
            model (str): One of the following models [ssd_300_vgg16_atrous_voc]
            num_worker (int, default: 2): number to accelerate data loading
            dataset (str, default:'voc'): Training dataset. Now support voc.
            batch_size (int, default: 4): Training mini-batch size
        """

        # EVALUATION PARAMETERS
        self.batch_size = batch_size
        self.num_workers = num_workers
        self.validation_threshold = validation_threshold
        self.nms_threshold = nms_threshold

        if ctx == 'cpu':
            self.ctx = [mx.cpu()]
        elif ctx == 'gpu':
            self.ctx = [mx.gpu(0)]
        else:
            raise ValueError('Invalid context.')

        # fix seed for mxnet, numpy and python builtin random generator.
        gutils.random.seed(233)

        self.width, self.height, _ = dataset_commons.get_model_prop(model)
        self.model_name = model

        # TODO: load the train and val rec file
        self.val_file = data_common['record_val_path']

        self.classes = [
            'bar_clamp', 'gear_box', 'vase', 'part_1', 'part_3', 'nozzle',
            'pawn', 'turbine_housing'
        ]  # keep the same order as in the config.json file
        print('Classes: ', self.classes)

        net = get_model(self.model_name, pretrained=False, ctx=self.ctx)
        # net.set_nms(nms_thresh=0.5, nms_topk=2)
        net.hybridize(static_alloc=True, static_shape=True)
        net.initialize(force_reinit=True, ctx=self.ctx)
        net.reset_class(classes=self.classes)
        net.load_parameters(param_path, ctx=self.ctx)
        self.net = net

        val_dataset = gdata.RecordFileDetection(self.val_file)

        # the actual validation loader
        val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
        val_loader = gluon.data.DataLoader(
            val_dataset.transform(
                SSDDefaultValTransform(self.width, self.height)),
            batch_size,
            shuffle=False,
            batchify_fn=val_batchify_fn,
            last_batch='keep',
            num_workers=num_workers)

        self.val_loader = val_loader
Example #7
import cv2
import matplotlib.pyplot as plt
from mxnet import nd
from gluoncv import data, utils
from gluon_utils.model_zoo.centernet import get_center_net_transfer

classes = ['pikachu']
# start from the COCO-pretrained CenterNet, swap in the custom class list,
# then load the fine-tuned checkpoint
net = get_center_net_transfer('center_net_resnet18_v1b_coco',
                              classes,
                              pretrained=True)
net.load_parameters("data/checkpoints/epoch_9.params")

dataset = data.RecordFileDetection('data/pikachu_train.rec')
image, label = dataset[0]
# resize to the network input size; transform_test then normalizes the image
# and adds the batch dimension
image = cv2.resize(image.asnumpy(), (512, 512))
x, img = data.transforms.presets.center_net.transform_test(nd.array(image),
                                                           short=512)

class_IDs, scores, bounding_boxes = net(x)
ax = utils.viz.plot_bbox(img,
                         bounding_boxes[0],
                         scores[0],
                         class_IDs[0],
                         class_names=net.classes)
plt.show()
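The detector returns fixed-size arrays of (class id, score, box); a hedged sketch of filtering them by score, where the 0.3 cutoff is an arbitrary choice and the cid < 0 check also skips the -1 padding some GluonCV detectors emit:

threshold = 0.3  # assumed confidence cutoff, tune per dataset
for cid, score, box in zip(class_IDs[0].asnumpy(),
                           scores[0].asnumpy(),
                           bounding_boxes[0].asnumpy()):
    if cid[0] < 0 or score[0] < threshold:
        continue  # skip padding and low-confidence detections
    print(net.classes[int(cid[0])], float(score[0]), box.tolist())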