Example #1
# Standard-library imports used below; PascalVoc, DetIter, Detector, and cfg
# are project-local modules assumed to be importable in this file.
import importlib
import logging
import os
import sys

def evaluate_net(net, dataset, devkit_path, mean_pixels, data_shape,
                 model_prefix, epoch, ctx, year=None, sets='test',
                 batch_size=1, nms_thresh=0.5, force_nms=False):
    """
    Evaluate entire dataset, basically simple wrapper for detections

    Parameters:
    ---------
    dataset : str
        name of dataset to evaluate
    devkit_path : str
        root directory of dataset
    mean_pixels : tuple of float
        (R, G, B) mean pixel values
    data_shape : int
        resize input data shape
    model_prefix : str
        load model prefix
    epoch : int
        load model epoch
    ctx : mx.ctx
        running context, mx.cpu() or mx.gpu(0)...
    year : str or None
        evaluate on which year's data
    sets : str
        evaluation set
    batch_size : int
        using batch_size for evaluation
    nms_thresh : float
        non-maximum suppression threshold
    force_nms : bool
        force suppress different categories
    """
    # set up logger
    logging.basicConfig()
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    if dataset == "pascal":
        if not year:
            year = '2007'
        imdb = PascalVoc(sets, year, devkit_path, shuffle=False, is_train=False)
        data_iter = DetIter(imdb, batch_size, data_shape, mean_pixels,
            rand_samplers=[], rand_mirror=False, is_train=False, shuffle=False)
        sys.path.append(os.path.join(cfg.ROOT_DIR, 'symbol'))
        net = importlib.import_module("symbol_" + net) \
            .get_symbol(imdb.num_classes, nms_thresh, force_nms)
        model_prefix += "_" + str(data_shape)
        detector = Detector(net, model_prefix, epoch, data_shape, mean_pixels, batch_size, ctx)
        logger.info("Start evaluation with {} images, be patient...".format(imdb.num_images))
        detections = detector.detect(data_iter)
        imdb.evaluate_detections(detections)
    else:
        raise NotImplementedError("No support for dataset: " + dataset)
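A sketch of a typical call; every path, the GPU index, and the network name below are placeholders, not values from the original project:

import mxnet as mx

# "vgg16_reduced" would resolve to a module named "symbol_vgg16_reduced".
evaluate_net(net="vgg16_reduced",
             dataset="pascal",
             devkit_path="/data/VOCdevkit",    # placeholder path
             mean_pixels=(123, 117, 104),      # (R, G, B)
             data_shape=300,
             model_prefix="/data/models/ssd",  # "_300" is appended internally
             epoch=0,
             ctx=mx.gpu(0),
             sets="test",
             batch_size=32,
             nms_thresh=0.45,
             force_nms=True)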
Example #2
# PascalVoc and ConcatDB are project-local dataset classes assumed importable.
def load_pascal(image_set,
                year,
                devkit_path,
                shuffle=False,
                class_names=None,
                true_negative=None):
    """
    wrapper function for loading pascal voc dataset

    Parameters:
    ----------
    image_set : str
        train, trainval...
    year : str
        2007, 2012 or combinations splitted by comma
    devkit_path : str
        root directory of dataset
    shuffle : bool
        whether to shuffle initial list

    Returns:
    ----------
    Imdb
    """
    image_set = [y.strip() for y in image_set.split(',')]
    assert image_set, "No image_set specified"
    year = [y.strip() for y in year.split(',')]
    assert year, "No year specified"

    # make sure (# sets == # years)
    if len(image_set) > 1 and len(year) == 1:
        year = year * len(image_set)
    if len(image_set) == 1 and len(year) > 1:
        image_set = image_set * len(year)
    assert len(image_set) == len(year), "Number of sets and year mismatch"

    imdbs = []
    for s, y in zip(image_set, year):
        imdbs.append(
            PascalVoc(s,
                      y,
                      devkit_path,
                      shuffle,
                      is_train=True,
                      class_names=class_names,
                      true_negative_images=true_negative))
    if len(imdbs) > 1:
        return ConcatDB(imdbs, shuffle)
    else:
        return imdbs[0]
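A brief usage sketch with a placeholder devkit path: the single image set is broadcast across both years, so this returns a ConcatDB over VOC2007 trainval and VOC2012 trainval (the common 07+12 training split):

imdb = load_pascal(image_set="trainval",
                   year="2007,2012",
                   devkit_path="/data/VOCdevkit",  # placeholder path
                   shuffle=True)
print(imdb.num_images)  # assuming the Imdb interface exposes num_images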
Example #3
# private_config, PascalVoc, and ConcatDB are project-local modules
# assumed to be importable in this file.
def load_pascal(shuffle=False, is_valid=False):
    """
    wrapper function for loading pascal voc dataset

    Parameters:
    ----------
    image_set : str
        train, trainval...
    year : str
        2007, 2012 or combinations splitted by comma
    devkit_path : str
        root directory of dataset
    shuffle : bool
        whether to shuffle initial list

    Returns:
    ----------
    Imdb
    """
    root_path = private_config.root_path
    if is_valid:
        image_set = private_config.valid_image_set
        data_path = private_config.valid_data_path
    else:
        data_path = private_config.data_path
        image_set = private_config.image_set

    image_set = [y.strip() for y in image_set.split(',')]
    assert image_set, "No image_set specified"

    imdbs = []
    for s in image_set:
        imdbs.append(
            PascalVoc(s,
                      data_path=data_path,
                      root_path=root_path,
                      shuffle=shuffle,
                      is_train=True))
    if len(imdbs) > 1:
        # Multi-set loading via ConcatDB is intentionally disabled in this
        # variant; the unreachable return shows what it would look like.
        raise NotImplementedError
        return ConcatDB(imdbs, shuffle)
    else:
        return imdbs[0]
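For reference, a minimal stand-in for the private_config module this variant expects; the attribute names are exactly those read by the code above, while every value is a made-up placeholder:

# private_config.py -- minimal stand-in; every value is a placeholder.
root_path = "/data/voc"                # passed to PascalVoc as root_path
data_path = "/data/voc/VOC2007"        # training data location
image_set = "trainval"                 # comma-separated training sets
valid_data_path = "/data/voc/VOC2007"  # validation data location
valid_image_set = "test"               # comma-separated validation sets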
Example #4
# RandCropper and RandPadder are project-local random-crop / random-pad
# augmentation samplers; train_cfg collects the data-augmentation settings.
train_cfg = {
    "random_samplers": [
        RandCropper(min_scale=.3, min_aspect_ratio=.5, max_aspect_ratio=2., min_overlap=.1),
        RandCropper(min_scale=.3, min_aspect_ratio=.5, max_aspect_ratio=2., min_overlap=.3),
        RandCropper(min_scale=.3, min_aspect_ratio=.5, max_aspect_ratio=2., min_overlap=.5),
        RandCropper(min_scale=.3, min_aspect_ratio=.5, max_aspect_ratio=2., min_overlap=.7),
        RandPadder(max_scale=2., min_aspect_ratio=.5, max_aspect_ratio=2., min_gt_scale=.05),
        RandPadder(max_scale=3., min_aspect_ratio=.5, max_aspect_ratio=2., min_gt_scale=.05),
        RandPadder(max_scale=4., min_aspect_ratio=.5, max_aspect_ratio=2., min_gt_scale=.05)
    ],
    "random_flip": True,
    "shuffle": True,
    "random_seed": None
}

# # validation
# cfg.VALID = edict()
# cfg.VALID.RAND_SAMPLERS = []
# cfg.VALID.RAND_MIRROR = False
# cfg.VALID.INIT_SHUFFLE = False
# cfg.VALID.EPOCH_SHUFFLE = False
# cfg.VALID.RAND_SEED = None

ssd_prov = DetIter(
    imdb=PascalVoc("trainval", "2007", "/unsullied/sharefs/yugang/Dataset/VOC", is_train=True),
    batch_size=32,
    data_shape=(300, 300),
    mean_pixels=[104, 117, 123],
    rand_samplers=train_cfg['random_samplers'],
    rand_flip=train_cfg['random_flip'],
    shuffle=train_cfg['shuffle'],
    rand_seed=train_cfg['random_seed']
)
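Assuming DetIter subclasses the standard mx.io.DataIter interface, a quick way to sanity-check the provider is to pull a single batch; the expected shape follows from the 300x300 config above:

batch = next(iter(ssd_prov))  # one mini-batch from the provider
print(batch.data[0].shape)    # expected: (32, 3, 300, 300)
print(batch.label[0].shape)   # label layout is defined by DetIter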