Example #1
def dump_infer_config(FLAGS, config):
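    """Write an inference config (infer_cfg.yml) into FLAGS.output_dir.

    Records the export mode, draw threshold, metric, the matched
    architecture with its TensorRT minimum subgraph size, and the
    preprocessing pipeline parsed from the TestReader config. Assumes
    config['architecture'] matches a key in trt_min_subgraph.
    """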
    from ppdet.core.config.yaml_helpers import setup_orderdict
    setup_orderdict()
    infer_cfg = OrderedDict({
        'use_python_inference': False,
        'mode': 'fluid',
        'draw_threshold': 0.5,
        'metric': config['metric']
    })
    trt_min_subgraph = {
        'YOLO': 3,
        'SSD': 3,
        'RCNN': 40,
        'RetinaNet': 40,
        'Face': 3,
        'TTFNet': 3,
    }
    infer_arch = config['architecture']

    for arch, min_subgraph_size in trt_min_subgraph.items():
        if arch in infer_arch:
            infer_cfg['arch'] = arch
            infer_cfg['min_subgraph_size'] = min_subgraph_size
            break

    if 'Mask' in config['architecture']:
        infer_cfg['mask_resolution'] = config['MaskHead']['resolution']
    infer_cfg['with_background'], infer_cfg['Preprocess'], infer_cfg[
        'label_list'] = parse_reader(config['TestReader'], config['metric'],
                                     infer_cfg['arch'])

    infer_cfg_path = os.path.join(FLAGS.output_dir, 'infer_cfg.yml')
    with open(infer_cfg_path, 'w') as f:
        yaml.dump(infer_cfg, f)
    logger.info("Export inference config file to {}".format(infer_cfg_path))
Example #2
def dump_infer_config(FLAGS, config):
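    """Write infer_cfg.yml into a subdirectory of FLAGS.output_dir named
    after the config file, using the module-level TRT_MIN_SUBGRAPH table
    to pick the TensorRT minimum subgraph size for the architecture.
    """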
    cfg_name = os.path.basename(FLAGS.config).split('.')[0]
    save_dir = os.path.join(FLAGS.output_dir, cfg_name)
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    from ppdet.core.config.yaml_helpers import setup_orderdict
    setup_orderdict()
    infer_cfg = OrderedDict({
        'use_python_inference': False,
        'mode': 'fluid',
        'draw_threshold': 0.5,
        'metric': config['metric']
    })
    infer_arch = config['architecture']

    for arch, min_subgraph_size in TRT_MIN_SUBGRAPH.items():
        if arch in infer_arch:
            infer_cfg['arch'] = arch
            infer_cfg['min_subgraph_size'] = min_subgraph_size
            break

    if 'Mask' in config['architecture']:
        infer_cfg['mask_resolution'] = config['MaskHead']['resolution']
    infer_cfg['with_background'], infer_cfg['Preprocess'], infer_cfg[
        'label_list'] = parse_reader(config['TestReader'], config['metric'],
                                     infer_cfg['arch'])

    infer_cfg_path = os.path.join(save_dir, 'infer_cfg.yml')
    with open(infer_cfg_path, 'w') as f:
        yaml.dump(infer_cfg, f)
    logger.info("Export inference config file to {}".format(infer_cfg_path))
Example #3
def _dump_infer_config(config, path, image_shape, model):
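    """Dump the inference config for a dynamic-graph model to `path`.

    Exits if the architecture has no TRT_MIN_SUBGRAPH entry; records the
    mask resolution when the model has a mask post-process step, and
    returns the (possibly updated) image shape from _parse_reader.
    """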
    arch_state = False
    from ppdet.core.config.yaml_helpers import setup_orderdict
    setup_orderdict()
    infer_cfg = OrderedDict({
        'mode': 'fluid',
        'draw_threshold': 0.5,
        'metric': config['metric'],
        'image_shape': image_shape
    })
    infer_arch = config['architecture']

    for arch, min_subgraph_size in TRT_MIN_SUBGRAPH.items():
        if arch in infer_arch:
            infer_cfg['arch'] = arch
            infer_cfg['min_subgraph_size'] = min_subgraph_size
            arch_state = True
            break
    if not arch_state:
        logger.error(
            'Architecture: {} is not supported for model export yet'.format(
                infer_arch))
        os._exit(0)
    if getattr(model, 'mask_post_process', None):
        infer_cfg['mask_resolution'] = model.mask_post_process.mask_resolution
    infer_cfg['with_background'], infer_cfg['Preprocess'], infer_cfg[
        'label_list'], image_shape = _parse_reader(
            config['TestReader'], config['TestDataset'], config['metric'],
            infer_cfg['arch'], image_shape)

    with open(path, 'w') as f:
        yaml.dump(infer_cfg, f)
    logger.info("Export inference config file to {}".format(path))
    return image_shape
Example #4
def _dump_infer_config(config, path, image_shape, model):
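    """Dump the inference config to `path`, including a dynamic-shape
    flag, tracker settings for MOT architectures (DeepSORT/JDE), and the
    reader and label list matched to detection, keypoint, or MOT
    pipelines.
    """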
    arch_state = False
    from ppdet.core.config.yaml_helpers import setup_orderdict
    setup_orderdict()
    use_dynamic_shape = image_shape[1] == -1
    infer_cfg = OrderedDict({
        'mode': 'fluid',
        'draw_threshold': 0.5,
        'metric': config['metric'],
        'use_dynamic_shape': use_dynamic_shape
    })
    infer_arch = config['architecture']

    if infer_arch in MOT_ARCH:
        if infer_arch == 'DeepSORT':
            tracker_cfg = config['DeepSORTTracker']
        else:
            tracker_cfg = config['JDETracker']
        infer_cfg['tracker'] = _parse_tracker(tracker_cfg)

    for arch, min_subgraph_size in TRT_MIN_SUBGRAPH.items():
        if arch in infer_arch:
            infer_cfg['arch'] = arch
            infer_cfg['min_subgraph_size'] = min_subgraph_size
            arch_state = True
            break
    if not arch_state:
        logger.error(
            'Architecture: {} is not supported for model export yet'.format(
                infer_arch))
        os._exit(0)
    if 'mask_head' in config[config['architecture']] and config[
            config['architecture']]['mask_head']:
        infer_cfg['mask'] = True
    label_arch = 'detection_arch'
    if infer_arch in KEYPOINT_ARCH:
        label_arch = 'keypoint_arch'

    if infer_arch in MOT_ARCH:
        label_arch = 'mot_arch'
        reader_cfg = config['TestMOTReader']
        dataset_cfg = config['TestMOTDataset']
    else:
        reader_cfg = config['TestReader']
        dataset_cfg = config['TestDataset']

    infer_cfg['Preprocess'], infer_cfg['label_list'] = _parse_reader(
        reader_cfg, dataset_cfg, config['metric'], label_arch, image_shape)

    with open(path, 'w') as f:
        yaml.dump(infer_cfg, f)
    logger.info("Export inference config file to {}".format(path))
Example #5
def dump_infer_config(FLAGS, config):
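    """Write infer_cfg.yml for static-graph export, creating the save
    directory if needed and recording optional landmark and mask outputs.
    """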
    arch_state = False
    cfg_name = os.path.basename(FLAGS.config).split('.')[0]
    save_dir = os.path.join(FLAGS.output_dir, cfg_name)
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    from ppdet.core.config.yaml_helpers import setup_orderdict
    setup_orderdict()
    infer_cfg = OrderedDict({
        'use_python_inference': False,
        'mode': 'fluid',
        'draw_threshold': 0.5,
        'metric': config['metric']
    })
    infer_arch = config['architecture']

    for arch, min_subgraph_size in TRT_MIN_SUBGRAPH.items():
        if arch in infer_arch:
            infer_cfg['arch'] = arch
            infer_cfg['min_subgraph_size'] = min_subgraph_size
            arch_state = True
            break
    if not arch_state:
        logger.error(
            'Architecture: {} is not supported for model export yet'.format(
                infer_arch))
        os._exit(0)

    # support landmark output
    if config.get('with_lmk'):
        infer_cfg['with_lmk'] = True

    if 'Mask' in config['architecture']:
        infer_cfg['mask_resolution'] = config['MaskHead']['resolution']
    infer_cfg['with_background'], infer_cfg['Preprocess'], infer_cfg[
        'label_list'] = parse_reader(config['TestReader'], config['metric'],
                                     infer_cfg['arch'])

    infer_cfg_path = os.path.join(save_dir, 'infer_cfg.yml')
    with open(infer_cfg_path, 'w') as f:
        yaml.dump(infer_cfg, f)
    logger.info("Export inference config file to {}".format(infer_cfg_path))
Example #6
def _dump_infer_config(config, path, image_shape, model):
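    """Dump the inference config to `path`; flags mask architectures,
    distinguishes keypoint label parsing, and inserts a 'background'
    label for S2ANet when it is missing.
    """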
    arch_state = False
    from ppdet.core.config.yaml_helpers import setup_orderdict
    setup_orderdict()
    infer_cfg = OrderedDict({
        'mode': 'fluid',
        'draw_threshold': 0.5,
        'metric': config['metric'],
    })
    infer_arch = config['architecture']

    for arch, min_subgraph_size in TRT_MIN_SUBGRAPH.items():
        if arch in infer_arch:
            infer_cfg['arch'] = arch
            infer_cfg['min_subgraph_size'] = min_subgraph_size
            arch_state = True
            break
    if not arch_state:
        logger.error(
            'Architecture: {} is not supported for model export yet'.format(
                infer_arch))
        os._exit(0)
    if 'Mask' in infer_arch:
        infer_cfg['mask'] = True
    label_arch = 'detection_arch'
    if infer_arch in KEYPOINT_ARCH:
        label_arch = 'keypoint_arch'
    infer_cfg['Preprocess'], infer_cfg['label_list'] = _parse_reader(
        config['TestReader'], config['TestDataset'], config['metric'],
        label_arch, image_shape)

    if infer_arch == 'S2ANet':
        # TODO: move background to num_classes
        if infer_cfg['label_list'][0] != 'background':
            infer_cfg['label_list'].insert(0, 'background')

    with open(path, 'w') as f:
        yaml.dump(infer_cfg, f)
    logger.info("Export inference config file to {}".format(path))
Example #7
def _dump_infer_config(config, path, image_shape, model):
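    """Dump the inference config to `path`, covering dynamic shape, MOT
    trackers, mask heads, keypoint label parsing, and PicoDet NMS
    settings tuned for faster prediction.
    """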
    arch_state = False
    from ppdet.core.config.yaml_helpers import setup_orderdict
    setup_orderdict()
    use_dynamic_shape = image_shape[2] == -1
    infer_cfg = OrderedDict({
        'mode': 'fluid',
        'draw_threshold': 0.5,
        'metric': config['metric'],
        'use_dynamic_shape': use_dynamic_shape
    })
    infer_arch = config['architecture']

    if infer_arch in MOT_ARCH:
        if infer_arch == 'DeepSORT':
            tracker_cfg = config['DeepSORTTracker']
        else:
            tracker_cfg = config['JDETracker']
        infer_cfg['tracker'] = _parse_tracker(tracker_cfg)

    for arch, min_subgraph_size in TRT_MIN_SUBGRAPH.items():
        if arch in infer_arch:
            infer_cfg['arch'] = arch
            infer_cfg['min_subgraph_size'] = min_subgraph_size
            arch_state = True
            break
    if not arch_state:
        logger.error(
            'Architecture: {} is not supported for model export yet.\n'
            'Please set TRT_MIN_SUBGRAPH in ppdet/engine/export_utils.py'
            .format(infer_arch))
        os._exit(0)
    if 'mask_head' in config[config['architecture']] and config[
            config['architecture']]['mask_head']:
        infer_cfg['mask'] = True
    label_arch = 'detection_arch'
    if infer_arch in KEYPOINT_ARCH:
        label_arch = 'keypoint_arch'

    if infer_arch in MOT_ARCH:
        label_arch = 'mot_arch'
        reader_cfg = config['TestMOTReader']
        dataset_cfg = config['TestMOTDataset']
    else:
        reader_cfg = config['TestReader']
        dataset_cfg = config['TestDataset']

    infer_cfg['Preprocess'], infer_cfg['label_list'] = _parse_reader(
        reader_cfg, dataset_cfg, config['metric'], label_arch, image_shape[1:])

    if infer_arch == 'PicoDet':
        # Lower the NMS thresholds up front to speed up prediction; they
        # can still be changed later in the exported infer_cfg.yml. The
        # dict is shared, so infer_cfg['NMS'] sees the adjusted values.
        config['PicoHead']['nms']["score_threshold"] = 0.3
        config['PicoHead']['nms']["nms_threshold"] = 0.5
        infer_cfg['NMS'] = config['PicoHead']['nms']
        infer_cfg['fpn_stride'] = config['PicoHead']['fpn_stride']

    with open(path, 'w') as f:
        yaml.dump(infer_cfg, f)
    logger.info("Export inference config file to {}".format(path))
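
These helpers are normally driven by an export script. A minimal usage sketch, assuming a parsed command-line namespace with config and output_dir attributes (parse_args here is a hypothetical argparse wrapper; load_config comes from ppdet.core.workspace):

from ppdet.core.workspace import load_config

FLAGS = parse_args()                # hypothetical CLI parsing helper
config = load_config(FLAGS.config)  # load the detection YAML config
dump_infer_config(FLAGS, config)    # writes infer_cfg.yml for deployment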