Code Example #1
File: test_sparsity.py  Project: terfendail/openvino

This test applies the configured sparsity algorithm with accuracy evaluation disabled, then restores the optimized IR and checks both the achieved sparsity level and the resulting graph (including weights) against a stored reference.

def test_sparsity_algo(test_models, tmp_path, models):
    model_name, model_framework, algorithm, sparsity_level, normed_threshold, ref_name = test_models
    algorithm_config = Dict({
        'algorithms': [{
            'name': algorithm,
            'params': {
                'sparsity_level': sparsity_level,
                'normed_threshold': normed_threshold,
            }
        }]
    })

    model = models.get(model_name, model_framework, tmp_path)

    engine_config = get_engine_config(model_name)
    config = merge_configs(model.model_params, engine_config, algorithm_config)
    config.engine.evaluate = False
    config.engine.type = 'accuracy_checker'

    # Run the sparsity algorithm; the optimized IR is written to exec_log_dir/optimized.
    _ = optimize(config)
    output_dir = os.path.join(config.model.exec_log_dir, 'optimized')
    xml_path = os.path.join(output_dir, config.model.model_name + '.xml')
    bin_path = os.path.join(output_dir, config.model.model_name + '.bin')
    # Restore the optimized model graph from the saved IR files.
    output_model, meta = stdout_redirect(restore_graph_from_ir, xml_path,
                                         bin_path)
    output_model.meta_data = meta

    # The restored model must reach the requested sparsity level.
    assert check_sparsity_level(NXModel(graph=output_model), config,
                                sparsity_level)
    # Compare the resulting graph, including weights, against the stored reference.
    check_graph(tmp_path,
                output_model,
                model_name + ref_name,
                model_framework,
                check_weights=True)
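
The `test_models` fixture supplies the model name, framework, algorithm, sparsity parameters, and the reference-graph suffix unpacked at the top of the test. The sketch below shows one plausible way such cases might be parametrized; the model names and parameter values are illustrative assumptions, not the repository's actual test data.

# Hypothetical parametrization of the cases consumed by test_sparsity_algo above.
# Names and numbers are illustrative only.
import pytest

SPARSITY_TEST_MODELS = [
    # (model_name, framework, algorithm, sparsity_level, normed_threshold, ref_name)
    ('mobilenet-v2', 'tf', 'WeightSparsity', 0.5, 0.05, '_sparse'),
    ('resnet-50', 'caffe', 'MagnitudeSparsity', 0.3, 0.05, '_sparse'),
]

@pytest.fixture(params=SPARSITY_TEST_MODELS,
                ids=['{}_{}_{}'.format(m[0], m[1], m[2]) for m in SPARSITY_TEST_MODELS])
def test_models(request):
    return request.param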
Code Example #2
File: test_sparsity.py  Project: terfendail/openvino

This test runs the sparsity algorithm with accuracy evaluation enabled, checks that the measured metrics stay close to the expected accuracy, verifies that the optimized IR files were produced, and confirms the resulting sparsity level.

def test_sparsity(test_models, tmp_path, models):
    model_name, model_framework, algorithm, sparsity_level, normed_threshold, expected_accuracy = test_models
    algorithm_config = Dict({
        'algorithms': [{
            'name': algorithm,
            'params': {
                'sparsity_level': sparsity_level,
                'normed_threshold': normed_threshold,
            }
        }]
    })

    # WeightSparsity additionally runs bias correction, which needs a target
    # device and a statistics subset size.
    if algorithm == 'WeightSparsity':
        bias_config = Dict({'target_device': 'CPU', 'stat_subset_size': 300})
        algorithm_config['algorithms'][0]['params'].update(bias_config)

    model = models.get(model_name, model_framework, tmp_path)

    engine_config = get_engine_config(model_name)
    config = merge_configs(model.model_params, engine_config, algorithm_config)
    config.engine.models[0].datasets[0].subsample_size = 1000

    # Run optimization and collect accuracy metrics from the accuracy checker engine.
    metrics = optimize(config)

    output_dir = os.path.join(config.model.exec_log_dir, 'optimized')

    for metric_name in metrics:
        print('{}: {:.4f}'.format(metric_name, metrics[metric_name]))

    # Accuracy after sparsification must stay within 0.006 of the expected values.
    assert metrics == pytest.approx(expected_accuracy, abs=0.006)
    xml_path = os.path.join(output_dir, config.model.model_name + '.xml')
    bin_path = os.path.join(output_dir, config.model.model_name + '.bin')
    assert os.path.exists(xml_path)
    assert os.path.exists(bin_path)

    # Check resulting sparsity level
    model, _ = stdout_redirect(restore_graph_from_ir, xml_path, bin_path)
    assert check_sparsity_level(NXModel(graph=model), config, sparsity_level)
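
Both examples rely on `check_sparsity_level` to confirm that the requested fraction of weights was actually zeroed out. As a rough illustration of what such a check amounts to, the sketch below computes the fraction of zero-valued elements across a set of weight tensors; it is a simplified stand-in, not the helper used by these tests.

# Simplified sketch of a sparsity-level check: count zero elements across all
# weight tensors and compare against the requested level. Illustration only,
# not the check_sparsity_level helper used above.
import numpy as np

def approx_sparsity_level(weight_tensors):
    total = sum(w.size for w in weight_tensors)
    zeros = sum(int(np.count_nonzero(w == 0)) for w in weight_tensors)
    return zeros / total if total else 0.0

def sparsity_reached(weight_tensors, target_level, tol=0.01):
    # The achieved level should be at least the requested one, within tolerance.
    return approx_sparsity_level(weight_tensors) + tol >= target_level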