Example #1
def get_lib_path(lib_name):
    """Function for getting absolute path in OpenVINO directory to specific lib"""
    os_name = get_os_name()
    all_libs = {
        'inference_engine_transformations': {
            'Windows': Path(
                'deployment_tools/inference_engine/bin/intel64/Release/inference_engine_transformations.dll'),
            'Linux': Path(
                'deployment_tools/inference_engine/lib/intel64/libinference_engine_transformations.so')
        },
        'MKLDNNPlugin': {
            'Windows': Path('deployment_tools/inference_engine/bin/intel64/Release/MKLDNNPlugin.dll'),
            'Linux': Path('deployment_tools/inference_engine/lib/intel64/libMKLDNNPlugin.so')
        },
        'ngraph': {
            'Windows': Path('deployment_tools/ngraph/lib/ngraph.dll'),
            'Linux': Path('deployment_tools/ngraph/lib/libngraph.so')
        }
    }
    return all_libs[lib_name][os_name]
Example #2
def get_lib_path(lib_name):
    """Function for getting absolute path in OpenVINO directory to specific lib"""
    os_name = get_os_name()
    all_libs = {
        'MKLDNNPlugin': {
            'Windows': Path('runtime/bin/intel64/Release/MKLDNNPlugin.dll'),
            'Linux': Path('runtime/lib/intel64/libMKLDNNPlugin.so')
        },
        'ov_runtime': {
            'Windows': Path('runtime/bin/intel64/Release/ov_runtime.dll'),
            'Linux': Path('runtime/lib/intel64/libov_runtime.so')
        }
    }
    return all_libs[lib_name][os_name]
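
Both versions of get_lib_path rely on pathlib.Path and a get_os_name() helper whose imports and definition are not shown in the snippets. Below is a minimal sketch of what such a helper might look like, assuming it simply maps platform.system() onto the 'Windows'/'Linux' keys used in the dictionaries above; the real helper in the test utilities may differ.

import platform
from pathlib import Path  # also required by get_lib_path() itself


def get_os_name():
    """Return the OS key expected by get_lib_path(): 'Windows' or 'Linux'."""
    system = platform.system()  # e.g. 'Windows', 'Linux', 'Darwin'
    if system not in ('Windows', 'Linux'):
        raise RuntimeError(f'Unsupported OS for get_lib_path(): {system}')
    return system
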
Example #3
def prepare_db_info(request, test_info, executable, niter, manifest_metadata):
    """Fixture for preparing and validating data to submit to a database.
    Fixture prepares data and metadata to submit to a database. One of the steps
    is parsing of build information from build manifest. After preparation,
    it checks if data contains required properties.
    """
    FIELDS_FOR_ID = ['run_id', 'timetest', 'model', 'device', 'niter']

    run_id = request.config.getoption("db_submit")
    if not run_id:
        yield
        return

    # add db_metadata
    db_meta_path = request.config.getoption("db_metadata")
    if db_meta_path:
        with open(db_meta_path, "r") as db_meta_f:
            test_info["db_info"].update(json.load(db_meta_f))

    # add test info
    info = {
        # results will be added immediately before uploading to DB in `pytest_runtest_makereport`
        "run_id": run_id,
        "timetest": str(executable.stem),
        "model": request.node.funcargs["instance"]["model"],
        "device": request.node.funcargs["instance"]["device"],
        "niter": niter,
        "test_name": request.node.name,
        "os": "_".join([str(item) for item in [get_os_name(), *get_os_version()]])
    }
    info['_id'] = hashlib.sha256(
        ''.join([str(info[key]) for key in FIELDS_FOR_ID]).encode()).hexdigest()
    test_info["db_info"].update(info)

    # add manifest metadata
    test_info["db_info"].update(manifest_metadata)

    # validate db_info
    schema = """
    {
        "type": "object",
        "properties": {
            "device": {
                "type": "object",
                "properties": {
                    "name": {"type": "string"}
                },
                "required": ["name"]
            },
            "model": {
                "type": "object",
                "properties": {
                    "path": {"type": "string"},
                    "name": {"type": "string"},
                    "precision": {"type": "string"},
                    "framework": {"type": "string"}
                },
                "required": ["path", "name", "precision", "framework"]
            },
            "run_id": {"type": "string"},
            "timetest": {"type": "string"},
            "niter": {"type": "integer"},
            "test_name": {"type": "string"},
            "results": {"type": "object"},
            "os": {"type": "string"},
            "_id": {"type": "string"}
        },
        "required": ["device", "model", "run_id", "timetest", "niter", "test_name", "os", "_id"],
        "additionalProperties": true
    }
    """
    schema = json.loads(schema)

    try:
        validate(instance=test_info["db_info"], schema=schema)
    except ValidationError:
        request.config.option.db_submit = False
        raise
    yield
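
The `_id` field above is a deterministic key: the SHA-256 of the concatenated string form of the fields listed in FIELDS_FOR_ID, so the same run/test/model/device/niter combination always yields the same identifier. A standalone sketch of that hashing step follows, using made-up placeholder values in place of what the fixture pulls from pytest options and other fixtures.

import hashlib

# Placeholder values for illustration only; the fixture fills these from
# request.config options, the test instance, and other fixtures.
info = {
    'run_id': 'run_42',
    'timetest': 'timetest_infer',
    'model': {'name': 'resnet-50', 'precision': 'FP32'},
    'device': {'name': 'CPU'},
    'niter': 10,
}
FIELDS_FOR_ID = ['run_id', 'timetest', 'model', 'device', 'niter']

# Same construction as in the fixture: join the string form of each key field and hash it.
doc_id = hashlib.sha256(
    ''.join([str(info[key]) for key in FIELDS_FOR_ID]).encode()).hexdigest()
print(doc_id)  # stable for identical field values across runs
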
Example #4
def prepare_db_info(request, instance, executable, niter, manifest_metadata):
    """Fixture for preparing and validating data to submit to a database.

    Fixture prepares data and metadata to submit to a database. One of the steps
    is parsing of build information from build manifest. After preparation,
    it checks if data contains required properties.
    """
    FIELDS_FOR_ID = ['run_id', 'test_exe', 'model', 'device', 'niter']

    run_id = request.config.getoption("db_submit")
    if not run_id:
        yield
        return

    instance["db"] = {}

    # add db_metadata
    db_meta_path = request.config.getoption("db_metadata")
    if db_meta_path:
        with open(db_meta_path, "r") as db_meta_f:
            instance["db"].update(json.load(db_meta_f))

    # add test info
    info = {
        # results will be added immediately before uploading to DB in `pytest_runtest_makereport`.
        **instance["orig_instance"],  # TODO: think about using `instance` instead of `orig_instance`
        "run_id": run_id,
        "test_exe": str(executable.stem),
        "niter": niter,
        "test_name": request.node.name,
        "os": "_".join([str(item) for item in [get_os_name(), *get_os_version()]]),
        "cpu_info": get_cpu_info(),
        "status": "not_finished",
        "error_msg": "",
        "results": {},
        "raw_results": {},
        "references": instance["instance"].get("references", {}),  # upload the actual references that were used
        "ref_factor": REFS_FACTOR,
    }
    info['_id'] = hashlib.sha256(''.join(
        [str(info[key]) for key in FIELDS_FOR_ID]).encode()).hexdigest()

    # add metadata
    instance["db"].update(info)
    # add manifest metadata
    instance["db"].update(manifest_metadata)

    # validate db_info
    schema = """
    {
        "type": "object",
        "properties": {
            "device": {
                "type": "object",
                "properties": {
                    "name": {"type": "string"}
                },
                "required": ["name"]
            },
            "model": {
                "type": "object",
                "properties": {
                    "name": {"type": "string"},
                    "precision": {"type": "string"},
                    "framework": {"type": "string"}
                },
                "required": ["name", "precision"]
            },
            "run_id": {"type": "string"},
            "test_exe": {"type": "string"},
            "niter": {"type": "integer"},
            "test_name": {"type": "string"},
            "os": {"type": "string"},
            "cpu_info": {"type": "string"},
            "status": {"type": "string"},
            "error_msg": {"type": "string"},
            "results": {"type": "object"},
            "raw_results": {"type": "object"},
            "references": {"type": "object"},
            "_id": {"type": "string"}
        },
        "required": ["device", "model", "run_id", "test_exe", "niter", "test_name", "os", "cpu_info", 
                     "status", "error_msg", "results", "raw_results", "references", "_id"],
        "additionalProperties": true
    }
    """
    schema = json.loads(schema)

    try:
        validate(instance=instance["db"], schema=schema)
    except ValidationError:
        request.config.option.db_submit = False
        raise
    yield
    instance["db"]["results"] = instance["results"]
    instance["db"]["raw_results"] = instance["raw_results"]