Code Example #1
File: converter.py Project: Umair772/VectorBlox-SDK
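Builds the command that converts a downloaded model to ONNX, expanding the $dl_dir/$conv_dir placeholders in the model's templated arguments, prints it, and runs it unless args.dry_run is set.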
def convert_to_onnx(reporter, model, output_dir, args):
    reporter.print_section_heading('{}Converting {} to ONNX',
                                   '(DRY RUN) ' if args.dry_run else '',
                                   model.name)

    conversion_to_onnx_args = [
        string.Template(arg).substitute(
            conv_dir=output_dir / model.subdirectory,
            dl_dir=args.download_dir / model.subdirectory)
        for arg in model.conversion_to_onnx_args
    ]
    cmd = [
        str(args.python),
        str(Path(__file__).absolute().parent / model.converter_to_onnx),
        *conversion_to_onnx_args
    ]

    reporter.print('Conversion to ONNX command: {}',
                   common.command_string(cmd))
    reporter.print(flush=True)

    success = True if args.dry_run else reporter.job_context.subprocess(cmd)
    reporter.print()

    return success
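As a minimal, self-contained sketch of the substitution step above (the argument strings are hypothetical; only the $dl_dir/$conv_dir placeholder names come from the code):

from pathlib import Path
from string import Template

# Hypothetical templated arguments of the same shape as
# model.conversion_to_onnx_args; Template.substitute() calls str() on the
# Path values, so the placeholders expand to plain path strings.
raw_args = [
    '--input-checkpoint=$dl_dir/model.ckpt',
    '--output-file=$conv_dir/model.onnx',
]

expanded = [
    Template(arg).substitute(
        conv_dir=Path('converted/public/example-model'),
        dl_dir=Path('downloads/public/example-model'))
    for arg in raw_args
]
print(expanded)
# On POSIX:
# ['--input-checkpoint=downloads/public/example-model/model.ckpt',
#  '--output-file=converted/public/example-model/model.onnx']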
Code Example #2
File: converter.py Project: Umair772/VectorBlox-SDK
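Converts one model to OpenVINO IR with Model Optimizer: it skips the model if no conversions are defined or no requested precision applies, first converts to ONNX when conversion_to_onnx_args is set, then runs mo.py once per requested precision.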
    def convert(reporter, model):
        if model.mo_args is None:
            reporter.print_section_heading(
                'Skipping {} (no conversions defined)', model.name)
            reporter.print()
            return True

        model_precisions = requested_precisions & model.precisions
        if not model_precisions:
            reporter.print_section_heading(
                'Skipping {} (all conversions skipped)', model.name)
            reporter.print()
            return True

        model_format = model.framework

        if model.conversion_to_onnx_args:
            if not convert_to_onnx(reporter, model, output_dir, args):
                return False
            model_format = 'onnx'

        expanded_mo_args = [
            string.Template(arg).substitute(
                dl_dir=args.download_dir / model.subdirectory,
                mo_dir=mo_path.parent,
                conv_dir=output_dir / model.subdirectory,
                config_dir=common.MODEL_ROOT / model.subdirectory)
            for arg in model.mo_args
        ]

        for model_precision in sorted(model_precisions):
            mo_cmd = [
                str(args.python), '--',
                str(mo_path), '--framework={}'.format(model_format),
                '--data_type={}'.format(model_precision),
                '--output_dir={}'.format(output_dir / model.subdirectory /
                                         model_precision),
                '--model_name={}'.format(model.name), *expanded_mo_args,
                *extra_mo_args
            ]

            reporter.print_section_heading(
                '{}Converting {} to IR ({})',
                '(DRY RUN) ' if args.dry_run else '', model.name,
                model_precision)

            reporter.print('Conversion command: {}',
                           common.command_string(mo_cmd))

            if not args.dry_run:
                reporter.print(flush=True)

                if not reporter.job_context.subprocess(mo_cmd):
                    return False

            reporter.print()

        return True
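For orientation, a command of the shape the loop assembles, using a hypothetical ONNX model named example-model at FP16 (all paths, including the mo.py location, are illustrative):

# Illustrative only: the list built above would look roughly like this.
mo_cmd = [
    '/usr/bin/python3', '--',
    '/opt/intel/openvino/deployment_tools/model_optimizer/mo.py',
    '--framework=onnx',
    '--data_type=FP16',
    '--output_dir=converted/public/example-model/FP16',
    '--model_name=example-model',
    # expanded_mo_args / extra_mo_args, e.g.:
    '--input_model=converted/public/example-model/example-model.onnx',
]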
Code Example #3
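Runs the model's pre-convert.py script, if the model directory contains one, passing the download and conversion directories as positional arguments.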
def run_pre_convert(reporter, model, output_dir, args):
    script = common.MODEL_ROOT / model.subdirectory / 'pre-convert.py'
    if not script.exists():
        return True

    reporter.print_section_heading('{}Running pre-convert script for {}',
        '(DRY RUN) ' if args.dry_run else '', model.name)

    cmd = [str(args.python), '--', str(script), '--',
        str(args.download_dir / model.subdirectory), str(output_dir / model.subdirectory)]

    reporter.print('Pre-convert command: {}', common.command_string(cmd))
    reporter.print(flush=True)

    success = True if args.dry_run else reporter.job_context.subprocess(cmd)
    reporter.print()

    return success
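The command above hands the script two positional directories after a '--' separator, so a per-model pre-convert.py can be expected to follow roughly this hypothetical skeleton:

#!/usr/bin/env python3
# Hypothetical skeleton of a per-model pre-convert.py; only the two
# positional directory arguments are implied by the command line above.
import argparse
from pathlib import Path


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('input_dir', type=Path)   # downloaded model files
    parser.add_argument('output_dir', type=Path)  # conversion working directory
    args = parser.parse_args()

    args.output_dir.mkdir(parents=True, exist_ok=True)
    # ... model-specific preprocessing of args.input_dir contents goes here ...


if __name__ == '__main__':
    main()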
Code Example #4
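Quantizes a converted IR with the Post-Training Optimization Tool (POT): it loads a per-model quantization.yml (falling back to DEFAULT_POT_CONFIG_BASE), fills in the engine and model paths, writes pot-config.json, runs POT, and moves the optimized .xml/.bin into the precision's output directory.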
def quantize(reporter, model, precision, args, output_dir, pot_path, pot_env):
    input_precision = common.KNOWN_QUANTIZED_PRECISIONS[precision]

    pot_config_base_path = common.MODEL_ROOT / model.subdirectory / 'quantization.yml'

    try:
        with pot_config_base_path.open('rb') as pot_config_base_file:
            pot_config_base = yaml.safe_load(pot_config_base_file)
    except FileNotFoundError:
        pot_config_base = DEFAULT_POT_CONFIG_BASE

    pot_config_paths = {
        'engine': {
            'config':
            str(common.MODEL_ROOT / model.subdirectory / 'accuracy-check.yml'),
        },
        'model': {
            'model':
            str(args.model_dir / model.subdirectory / input_precision /
                (model.name + '.xml')),
            'weights':
            str(args.model_dir / model.subdirectory / input_precision /
                (model.name + '.bin')),
            'model_name':
            model.name,
        }
    }

    pot_config = {**pot_config_base, **pot_config_paths}

    if args.target_device:
        pot_config['compression']['target_device'] = args.target_device

    reporter.print_section_heading('{}Quantizing {} from {} to {}',
                                   '(DRY RUN) ' if args.dry_run else '',
                                   model.name, input_precision, precision)

    model_output_dir = output_dir / model.subdirectory / precision
    pot_config_path = model_output_dir / 'pot-config.json'

    reporter.print('Creating {}...', pot_config_path)
    pot_config_path.parent.mkdir(parents=True, exist_ok=True)
    with pot_config_path.open('w') as pot_config_file:
        json.dump(pot_config, pot_config_file, indent=4)
        pot_config_file.write('\n')

    pot_output_dir = model_output_dir / 'pot-output'
    pot_output_dir.mkdir(parents=True, exist_ok=True)

    pot_cmd = [
        str(args.python),
        '--',
        str(pot_path),
        '--config={}'.format(pot_config_path),
        '--direct-dump',
        '--output-dir={}'.format(pot_output_dir),
    ]

    reporter.print('Quantization command: {}', common.command_string(pot_cmd))
    reporter.print(
        'Quantization environment: {}',
        ' '.join('{}={}'.format(k, common.quote_arg(v))
                 for k, v in sorted(pot_env.items())))

    success = True

    if not args.dry_run:
        reporter.print(flush=True)

        success = reporter.job_context.subprocess(pot_cmd,
                                                  env={
                                                      **os.environ,
                                                      **pot_env
                                                  })

    reporter.print()
    if not success: return False

    if not args.dry_run:
        reporter.print('Moving quantized model to {}...', model_output_dir)
        for ext in ['.xml', '.bin']:
            (pot_output_dir / 'optimized' / (model.name + ext)).replace(
                model_output_dir / (model.name + ext))
        reporter.print()

    return True
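Assuming DEFAULT_POT_CONFIG_BASE carries a DefaultQuantization section like the inline config in Code Example #5 below, the generated pot-config.json would look roughly like this (paths are illustrative placeholders):

{
    "compression": {
        "algorithms": [
            {
                "name": "DefaultQuantization",
                "params": {
                    "preset": "performance",
                    "stat_subset_size": 300
                }
            }
        ]
    },
    "engine": {
        "config": "<MODEL_ROOT>/public/example-model/accuracy-check.yml"
    },
    "model": {
        "model": "<model_dir>/public/example-model/FP16/example-model.xml",
        "weights": "<model_dir>/public/example-model/FP16/example-model.bin",
        "model_name": "example-model"
    }
}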
Code Example #5
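A standalone variant of the same quantization step: the POT configuration is built inline (with the accuracy-checker config taken from OMZ_ROOT/tools/accuracy_checker/configs), and plain print() and subprocess.run() replace the reporter/job_context abstraction.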
def quantize(model, precision, args, output_dir, pot_path, pot_env):
    input_precision = common.KNOWN_QUANTIZED_PRECISIONS[precision]

    pot_config = {
        'compression': {
            'algorithms': [
                {
                    'name': 'DefaultQuantization',
                    'params': {
                        'preset': 'performance',
                        'stat_subset_size': 300,
                    },
                },
            ],
        },
        'engine': {
            'config': str(OMZ_ROOT / 'tools/accuracy_checker/configs' / (model.name + '.yml')),
        },
        'model': {
            'model': str(args.model_dir / model.subdirectory / input_precision / (model.name + '.xml')),
            'weights': str(args.model_dir / model.subdirectory / input_precision / (model.name + '.bin')),
            'model_name': model.name,
        }
    }

    if args.target_device:
        pot_config['compression']['target_device'] = args.target_device

    print('========= {}Quantizing {} from {} to {}'.format(
        '(DRY RUN) ' if args.dry_run else '', model.name, input_precision, precision))

    model_output_dir = output_dir / model.subdirectory / precision
    pot_config_path = model_output_dir / 'pot-config.json'

    print('Creating {}...'.format(pot_config_path))
    pot_config_path.parent.mkdir(parents=True, exist_ok=True)
    with pot_config_path.open('w') as pot_config_file:
        json.dump(pot_config, pot_config_file, indent=4)
        pot_config_file.write('\n')

    pot_output_dir = model_output_dir / 'pot-output'
    pot_output_dir.mkdir(parents=True, exist_ok=True)

    pot_cmd = [str(args.python), '--', str(pot_path),
        '--config={}'.format(pot_config_path),
        '--direct-dump',
        '--output-dir={}'.format(pot_output_dir),
    ]

    print('Quantization command: {}'.format(common.command_string(pot_cmd)))
    print('Quantization environment: {}'.format(
        ' '.join('{}={}'.format(k, common.quote_arg(v))
            for k, v in sorted(pot_env.items()))))

    success = True

    if not args.dry_run:
        print('', flush=True)

        success = subprocess.run(pot_cmd, env={**os.environ, **pot_env}).returncode == 0

    print('')
    if not success: return False

    if not args.dry_run:
        print('Moving quantized model to {}...'.format(model_output_dir))
        for ext in ['.xml', '.bin']:
            (pot_output_dir / 'optimized' / (model.name + ext)).replace(
                model_output_dir / (model.name + ext))
        print('')

    return True
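In both variants, env={**os.environ, **pot_env} runs POT with the inherited environment plus the caller-supplied variables (pot_env entries override inherited ones), and the final move step relies on POT writing the optimized IR under pot-output/optimized, which is the layout the --direct-dump flag is used to obtain here.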