Code Example #1
def run_ovtf_cpp_gtests(artifacts_dir, log_dir, filters):
    root_pwd = os.getcwd()
    artifacts_dir = os.path.abspath(artifacts_dir)
    log_dir = os.path.abspath(log_dir)

    # Check if we can run C++ tests
    if not os.path.exists(os.path.join(artifacts_dir, "test/gtest_ovtf")):
        print("gtest_ovtf not found. Skipping C++ unit tests...")
        return

    os.environ['GTEST_OUTPUT'] = 'xml:%s/xunit_gtest.xml' % log_dir

    if not os.path.isdir(artifacts_dir):
        raise Exception("Artifacts directory doesn't exist: " + artifacts_dir)

    # First run the C++ gtests
    lib_dir = TestEnv.get_platform_lib_dir()

    os.environ['LD_LIBRARY_PATH'] = os.getenv(
        "LD_LIBRARY_PATH", "") + ':' + os.path.join(artifacts_dir, lib_dir)
    if not os.path.exists(artifacts_dir):
        raise AssertionError("Could not find directory")
    os.chdir(os.path.join(artifacts_dir, "test"))
    if filters is not None:
        gtest_filters = "--gtest_filter=" + filters
        cmd = ['./gtest_ovtf', gtest_filters]
    else:
        cmd = ['./gtest_ovtf']

    command_executor(cmd)
    if not os.path.exists(root_pwd):
        raise AssertionError("Could not find directory")
    os.chdir(root_pwd)
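
The command_executor helper appears in every example on this page, including the one above, but is not itself listed. The sketch below is only an assumption about its shape, based on how the examples call it (either a command string or an argument list, with optional verbose and stderr arguments); it is not the project's actual implementation.

import shlex
import subprocess


def command_executor(cmd, verbose=False, stdout=None, stderr=None):
    # Sketch only: accept either a plain string or an argument list, since
    # both call styles appear in the examples on this page.
    if verbose:
        print("Running command: " +
              (cmd if isinstance(cmd, str) else ' '.join(cmd)))
    if isinstance(cmd, str):
        cmd = shlex.split(cmd)
    if subprocess.call(cmd, stdout=stdout, stderr=stderr) != 0:
        raise Exception("Error running command: " + ' '.join(cmd))
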
Code Example #2
def helper(self, p0_shape, p1_shape, p0_actual_shape, p1_actual_shape,
           shapehints):
    env_var_map = self.store_env_variables(["NGRAPH_TF_BACKEND"])
    self.set_env_variable("NGRAPH_TF_BACKEND", "INTERPRETER")

    inp0 = get_inputs(p0_actual_shape)
    inp1 = get_inputs(p1_actual_shape)
    x, y, z, temp_in_pbtxt_name = create_graph(p0_shape, p1_shape)
    temp_out_pbtxt_name = get_pbtxt_name('temp_graph_out_', p0_shape, p1_shape)
    json_name = 'temp_config_file.json'
    # shapehints is a list of dictionaries (keys are node names, vals are lists (of shapes))
    Tf2ngraphJson.dump_json(json_name, None, shapehints)

    command_executor('python ../../tools/tf2ngraph.py --input_pbtxt ' +
                     temp_in_pbtxt_name + ' --output_nodes z --output_pbtxt ' +
                     temp_out_pbtxt_name + ' --ng_backend INTERPRETER ' +
                     ' --config_file ' + json_name + ' --precompile')

    # Tuple-indexing idiom: expect one precompiled executable when shapehints
    # is empty, otherwise one per shape-hint dictionary.
    num_expected_execs = (len(shapehints), 1)[len(shapehints) == 0]
    check_pbtxt_has_exec(temp_out_pbtxt_name, num_expected_execs)

    tf_out_val = run_pbtxt(temp_in_pbtxt_name, inp0, inp1)
    ng_out_vals = run_pbtxt(temp_out_pbtxt_name, inp0, inp1)
    assert ((tf_out_val == ng_out_vals).all())

    os.remove(temp_in_pbtxt_name)
    os.remove(temp_out_pbtxt_name)
    os.remove(json_name)

    self.restore_env_variables(env_var_map)
Code Example #3
File: test_utils.py Project: iSwapna/ngraph-bridge
def run_ngtf_cpp_gtests(artifacts_dir, log_dir, filters):
    root_pwd = os.getcwd()
    artifacts_dir = os.path.abspath(artifacts_dir)
    log_dir = os.path.abspath(log_dir)

    # Check if we can run C++ tests
    if not os.path.exists(os.path.join(artifacts_dir, "test/gtest_ngtf")):
        print("gtest_ngtf not found. Skipping C++ unit tests...")
        return

    os.environ['GTEST_OUTPUT'] = 'xml:%s/xunit_gtest.xml' % log_dir

    if not os.path.isdir(artifacts_dir):
        raise Exception("Artifacts directory doesn't exist: " + artifacts_dir)

    # First run the C++ gtests
    lib_dir = 'lib'
    if 'CentOS' in get_os_type():
        lib_dir = 'lib64'

    os.environ['LD_LIBRARY_PATH'] = os.path.join(artifacts_dir, lib_dir)
    os.chdir(os.path.join(artifacts_dir, "test"))
    if filters is not None:
        gtest_filters = "--gtest_filter=" + filters
        cmd = ['./gtest_ngtf', gtest_filters]
    else:
        cmd = ['./gtest_ngtf']

    command_executor(cmd, verbose=True, stderr=subprocess.DEVNULL)
    os.chdir(root_pwd)
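
The get_os_type helper, used above for the CentOS lib64 check and later for the macOS test-list selection, is not listed on this page. A rough sketch, assuming callers only need 'Darwin' on macOS and a distribution name such as 'CentOS Linux' on Linux:

import platform


def get_os_type():
    # Sketch only: macOS reports 'Darwin' via platform.system().
    if platform.system() == 'Darwin':
        return 'Darwin'
    # On Linux, read the distribution name from /etc/os-release so that
    # checks like "'CentOS' in get_os_type()" keep working.
    try:
        with open('/etc/os-release') as f:
            for line in f:
                if line.startswith('NAME='):
                    return line.split('=', 1)[1].strip().strip('"')
    except OSError:
        pass
    return platform.system()
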
Code Example #4
File: test_utils.py Project: iSwapna/ngraph-bridge
def run_ngtf_gtests(build_dir, filters):
    root_pwd = os.getcwd()
    build_dir = os.path.abspath(build_dir)

    # Check if we can run C++ tests
    if not os.path.exists(os.path.join(build_dir, "test/gtest_ngtf")):
        print("gtest_ngtf not found. Skipping C++ unit tests...")
        return

    os.environ['GTEST_OUTPUT'] = 'xml:%s/xunit_gtest.xml' % build_dir

    if not os.path.isdir(build_dir):
        raise Exception("build directory doesn't exist: " + build_dir)

    # First run the C++ gtests
    os.chdir(os.path.join(build_dir, "test"))
    if filters is not None:
        gtest_filters = "--gtest_filter=" + filters
        cmd = ['./gtest_ngtf', gtest_filters]
    else:
        cmd = ['./gtest_ngtf']

    command_executor(cmd, verbose=True, stderr=subprocess.DEVNULL)

    os.chdir(root_pwd)
Code Example #5
File: test_utils.py Project: Tubbz-alt/ngraph-bridge
def run_tensorflow_pytests_from_artifacts(ngraph_tf_src_dir, tf_src_dir,
                                          xml_output):
    root_pwd = os.getcwd()
    ngraph_tf_src_dir = os.path.abspath(ngraph_tf_src_dir)

    # Check to see if we need to apply the patch for Grappler
    import ngraph_bridge
    patch_file_name = "test/python/tensorflow/tf_unittest_ngraph" + (
        "_with_grappler"
        if ngraph_bridge.is_grappler_enabled() else "") + ".patch"
    patch_file = os.path.abspath(
        os.path.join(ngraph_tf_src_dir, patch_file_name))

    # Next patch the TensorFlow so that the tests run using ngraph_bridge
    pwd = os.getcwd()

    # Go to the location of the TensorFlow install directory
    import tensorflow as tf
    tf_dir = tf.sysconfig.get_lib()
    os.chdir(tf_dir + '/python/framework')
    print("CURRENT DIR: " + os.getcwd())

    print("Patching TensorFlow using: %s" % patch_file)
    cmd = subprocess.Popen('patch -N -i ' + patch_file,
                           shell=True,
                           stdout=subprocess.PIPE)
    printed_lines = cmd.communicate()
    # Check if the patch is being applied for the first time, in which case
    # cmd.returncode will be 0 or if the patch has already been applied, in
    # which case the string will be found, in all other cases the assertion
    # will fail
    assert cmd.returncode == 0 or 'patch detected!  Skipping patch' in str(
        printed_lines[0]), "Error applying the patch."
    os.chdir(pwd)

    # Now run the TensorFlow python tests
    test_src_dir = os.path.join(ngraph_tf_src_dir, "test/python/tensorflow")
    test_script = os.path.join(test_src_dir, "tf_unittest_runner.py")

    test_manifest_file = TestEnv.get_test_manifest_filename()
    if not os.path.isabs(test_manifest_file):
        test_manifest_file = os.path.join(test_src_dir, test_manifest_file)

    test_xml_report = './junit_tensorflow_tests.xml'

    import psutil
    num_cores = int(psutil.cpu_count(logical=False))
    print("OMP_NUM_THREADS: %s " % str(num_cores))
    os.environ['OMP_NUM_THREADS'] = str(num_cores)
    os.environ['NGRAPH_TF_DISABLE_DEASSIGN_CLUSTERS'] = '1'

    cmd = [
        "python", test_script, "--tensorflow_path", tf_src_dir,
        "--run_tests_from_file", test_manifest_file
    ]
    if xml_output:
        cmd.extend(["--xml_report", test_xml_report])
    command_executor(cmd, verbose=True)

    os.chdir(root_pwd)
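
Several examples below call apply_patch, which is also not shown on this page. A minimal sketch, assuming it mirrors the inline patching logic of Code Example #5 above (run patch with -N and tolerate a patch that has already been applied):

import subprocess


def apply_patch(patch_file, level=1):
    # Sketch only: run `patch -N` so an already-applied patch is skipped
    # rather than reapplied.
    cmd = subprocess.Popen(
        'patch -p%d -N -i %s' % (level, patch_file),
        shell=True,
        stdout=subprocess.PIPE)
    printed_lines = cmd.communicate()
    # A return code of 0 means the patch applied cleanly; the "Skipping patch"
    # message means it had been applied earlier, which is also acceptable.
    assert cmd.returncode == 0 or 'patch detected!  Skipping patch' in str(
        printed_lines[0]), "Error applying the patch."
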
Code Example #6
def run_ngtf_cpp_gtests(artifacts_dir, log_dir, filters):
    root_pwd = os.getcwd()
    artifacts_dir = os.path.abspath(artifacts_dir)
    log_dir = os.path.abspath(log_dir)

    os.environ['GTEST_OUTPUT'] = 'xml:%s/xunit_gtest.xml' % log_dir

    if not os.path.isdir(artifacts_dir):
        raise Exception("Artifacts directory doesn't exist: " + artifacts_dir)

    # First run the C++ gtests
    lib_dir = 'lib'
    if 'CentOS' in get_os_type():
        lib_dir = 'lib64'

    os.environ['LD_LIBRARY_PATH'] = os.path.join(artifacts_dir, lib_dir)
    os.chdir(os.path.join(artifacts_dir, "test"))
    if filters is not None:
        gtest_filters = "--gtest_filter=" + filters
        cmd = ['./gtest_ngtf', gtest_filters]
    else:
        cmd = ['./gtest_ngtf']

    command_executor(cmd, verbose=True)
    os.chdir(root_pwd)
Code Example #7
    def test_MLP(self):
        cwd = os.getcwd()
        os.chdir('../model_level_tests/')
        try:
            command_executor(
                "python test_main.py --run_basic_tests --models MLP")
        finally:
            os.chdir(cwd)
Code Example #8
def run_tensorflow_pytests_from_artifacts(backend, ngraph_tf_src_dir,
                                          tf_src_dir, xml_output):
    root_pwd = os.getcwd()

    ngraph_tf_src_dir = os.path.abspath(ngraph_tf_src_dir)

    # Check to see if we need to apply the patch for Grappler
    import ngraph_bridge
    patch_file_name = "test/python/tensorflow/tf_unittest_ngraph" + (
        "_with_grappler"
        if ngraph_bridge.is_grappler_enabled() else "") + ".patch"
    patch_file = os.path.abspath(
        os.path.join(ngraph_tf_src_dir, patch_file_name))

    # Next patch the TensorFlow so that the tests run using ngraph_bridge
    pwd = os.getcwd()

    # Go to the location of the TensorFlow install directory
    import tensorflow as tf
    tf_dir = tf.sysconfig.get_lib()
    os.chdir(os.path.join(tf_dir, '../'))
    print("CURRENT DIR: " + os.getcwd())

    print("Patching TensorFlow using: %s" % patch_file)
    apply_patch(patch_file)
    os.chdir(pwd)

    # Now run the TensorFlow python tests
    test_src_dir = os.path.join(ngraph_tf_src_dir, "test/python/tensorflow")
    test_script = os.path.join(test_src_dir, "tf_unittest_runner.py")
    if backend is not None and 'GPU' in backend:
        test_manifest_file = os.path.join(test_src_dir,
                                          "python_tests_list_gpu.txt")
    else:
        test_manifest_file = os.path.join(test_src_dir,
                                          "python_tests_list.txt")
    test_xml_report = './junit_tensorflow_tests.xml'

    import psutil
    num_cores = int(psutil.cpu_count(logical=False))
    print("OMP_NUM_THREADS: %s " % str(num_cores))
    os.environ['OMP_NUM_THREADS'] = str(num_cores)
    os.environ['NGRAPH_TF_DISABLE_DEASSIGN_CLUSTERS'] = '1'

    # should this python be sys.executable?
    cmd = [
        "python",
        test_script,
        "--tensorflow_path",
        tf_src_dir,
        "--run_tests_from_file",
        test_manifest_file,
    ]
    if xml_output:
        cmd.extend(["--xml_report", test_xml_report])
    command_executor(cmd)

    os.chdir(root_pwd)
Code Example #9
File: test_utils.py Project: adk9/ngraph-bridge
def run_tensorflow_pytests(venv_dir, build_dir, ngraph_tf_src_dir, tf_src_dir):
    root_pwd = os.getcwd()

    build_dir = os.path.abspath(build_dir)
    venv_dir = os.path.abspath(venv_dir)
    ngraph_tf_src_dir = os.path.abspath(ngraph_tf_src_dir)

    patch_file = os.path.abspath(
        os.path.join(ngraph_tf_src_dir,
                     "test/python/tensorflow/tf_unittest_ngraph.patch"))

    # Load the virtual env
    venv_dir_absolute = load_venv(venv_dir)

    # Next patch the TensorFlow so that the tests run using ngraph_bridge
    pwd = os.getcwd()

    # Go to site-packages/tensorflow_core/python/framework
    os.chdir(
        glob.glob(venv_dir_absolute +
                  "/lib/py*/site-packages/tensorflow_core/python/framework")[0])
    print("CURRENT DIR: " + os.getcwd())

    print("Patching TensorFlow using: %s" % patch_file)
    apply_patch(patch_file)
    os.chdir(pwd)

    # Now run the TensorFlow python tests
    test_src_dir = os.path.join(ngraph_tf_src_dir, "test/python/tensorflow")
    test_script = os.path.join(test_src_dir, "tf_unittest_runner.py")
    if get_os_type() == 'Darwin':
        test_manifest_file = os.path.join(test_src_dir,
                                          "python_tests_list_mac.txt")
    else:
        test_manifest_file = os.path.join(test_src_dir, "python_tests_list.txt")
    test_xml_report = '%s/junit_tensorflow_tests.xml' % build_dir

    import psutil
    num_cores = int(psutil.cpu_count(logical=False))
    print("OMP_NUM_THREADS: %s " % str(num_cores))
    os.environ['OMP_NUM_THREADS'] = str(num_cores)
    os.environ['NGRAPH_TF_DISABLE_DEASSIGN_CLUSTERS'] = '1'

    # command_executor([
    #     "python", test_script, "--tensorflow_path", tf_src_dir,
    #     "--run_tests_from_file", test_manifest_file, "--xml_report",
    #     test_xml_report
    # ], verbose=True)

    command_executor([
        "python", test_script, "--tensorflow_path", tf_src_dir,
        "--run_tests_from_file", test_manifest_file
    ],
                     verbose=True)

    os.chdir(root_pwd)
Code Example #10
    def test_command_line_api(self, inp_format, inp_loc, out_format,
                              commandline, ng_device):
        # Only run this test when grappler is enabled
        if not ngraph_bridge.is_grappler_enabled():
            return
        assert TestConversionScript.format_and_loc_match(inp_format, inp_loc)
        out_loc = inp_loc.split('.')[0] + '_modified' + (
            '' if out_format == 'savedmodel' else ('.' + out_format))
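        # Remove any stale output from a previous run: os.remove if out_loc is
        # a file, shutil.rmtree if it is a directory.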
        try:
            (shutil.rmtree, os.remove)[os.path.isfile(out_loc)](out_loc)
        except:
            pass
        conversion_successful = False
        try:
            if commandline:
                # In CI this test is expected to be run out of artifacts/test/python
                command_executor('python ../../tools/tf2ngraph.py --input' +
                                 inp_format + ' ' + inp_loc +
                                 ' --outnodes out_node --output' + out_format +
                                 ' ' + out_loc + ' --ngbackend ' + ng_device)
            else:
                convert(inp_format, inp_loc, out_format, out_loc, ['out_node'],
                        ng_device)
            conversion_successful = True
        finally:
            if not conversion_successful:
                try:
                    (shutil.rmtree, os.remove)[os.path.isfile(out_loc)](out_loc)
                except:
                    pass
        assert conversion_successful

        gdef = get_gdef(out_format, out_loc)
        (shutil.rmtree, os.remove)[os.path.isfile(out_loc)](out_loc)

        with tf.Graph().as_default() as g:
            tf.import_graph_def(gdef, name='')
            # The graph should have exactly one encapsulate
            assert len([
                0 for i in g.get_operations() if i.type == 'NGraphEncapsulate'
            ]) == 1
            x = self.get_tensor(g, "x:0", False)
            y = self.get_tensor(g, "y:0", False)
            out = self.get_tensor(g, "out_node:0", False)

            # Run the graph, feeding all-zero (10,) vectors for both inputs x and y.
            sess_fn = lambda sess: sess.run(
                [out], feed_dict={i: np.zeros((10,)) for i in [x, y]})

            res1 = self.with_ngraph(sess_fn)
            res2 = self.without_ngraph(sess_fn)

            exp = [0.5 * np.ones((10,))]
            # Note both run on Host (because NgraphEncapsulate can only run on host)
            assert np.isclose(res1, res2).all()
            # Comparing with expected value
            assert np.isclose(res1, exp).all()
Code Example #11
def run_resnet50(build_dir):

    root_pwd = os.getcwd()
    build_dir = os.path.abspath(build_dir)
    os.chdir(build_dir)

    call(['git', 'clone', 'https://github.com/tensorflow/benchmarks.git'])
    os.chdir('benchmarks/scripts/tf_cnn_benchmarks/')

    call(['git', 'checkout', '4c7b09ad87bbfc4b1f89650bcee40b3fc5e7dfed'])

    junit_script = os.path.abspath('%s/test/ci/junit-wrap.sh' % root_pwd)

    # Update the script by adding `import ngraph_bridge`
    with open('convnet_builder.py', 'a') as outfile:
        call(['echo', 'import ngraph_bridge'], stdout=outfile)

    # Setup the env flags
    import psutil
    num_cores = int(psutil.cpu_count(logical=False))
    print("OMP_NUM_THREADS: %s " % str(num_cores))

    os.environ['OMP_NUM_THREADS'] = str(num_cores)
    os.environ["KMP_AFFINITY"] = 'granularity=fine,compact,1,0'

    # Delete the temporary model save directory
    model_save_dir = os.getcwd() + '/modelsavepath'
    if os.path.exists(model_save_dir) and os.path.isdir(model_save_dir):
        shutil.rmtree(model_save_dir)

    os.environ['JUNIT_WRAP_FILE'] = "%s/junit_training_test.xml" % build_dir
    os.environ['JUNIT_WRAP_SUITE'] = 'models'
    os.environ['JUNIT_WRAP_TEST'] = 'resnet50-training'

    # Run training job
    cmd = [
        junit_script, 'python', 'tf_cnn_benchmarks.py', '--data_format',
        'NCHW', '--num_inter_threads', '1', '--train_dir=' + model_save_dir,
        '--num_batches', '10', '--model=resnet50', '--batch_size=128'
    ]
    command_executor(cmd)

    os.environ['JUNIT_WRAP_FILE'] = "%s/junit_inference_test.xml" % build_dir
    os.environ['JUNIT_WRAP_SUITE'] = 'models'
    os.environ['JUNIT_WRAP_TEST'] = 'resnet50-inference'

    # Run inference job
    cmd = [
        junit_script, 'python', 'tf_cnn_benchmarks.py', '--data_format',
        'NCHW', '--num_inter_threads', '1', '--train_dir=' + model_save_dir,
        '--model=resnet50', '--batch_size=128', '--num_batches', '10', '--eval'
    ]
    command_executor(cmd)

    os.chdir(root_pwd)
Code Example #12
def run_resnet50_forward_pass_from_artifacts(ngraph_tf_src_dir, artifact_dir,
                                             batch_size, iterations):

    root_pwd = os.getcwd()
    artifact_dir = os.path.abspath(artifact_dir)
    ngraph_tf_src_dir = os.path.abspath(ngraph_tf_src_dir)
    install_ngraph_bridge(artifact_dir)

    # Now clone the repo and proceed
    call(['git', 'clone', 'https://github.com/tensorflow/benchmarks.git'])
    os.chdir('benchmarks')
    call(['git', 'checkout', '4c7b09ad87bbfc4b1f89650bcee40b3fc5e7dfed'])

    # Check to see if we need to patch the repo for Grappler
    # benchmark_cnn.patch will only work for the CPU backend
    patch_file = os.path.abspath(
        os.path.join(ngraph_tf_src_dir, "test/grappler/benchmark_cnn.patch"))
    import ngraph_bridge
    if ngraph_bridge.is_grappler_enabled():
        print("Patching repo using: %s" % patch_file)
        apply_patch(patch_file)

    os.chdir('scripts/tf_cnn_benchmarks/')

    # junit_script = os.path.abspath('%s/test/ci/junit-wrap.sh' % root_pwd)

    # Update the script by adding `import ngraph_bridge`
    with open('convnet_builder.py', 'a') as outfile:
        call(['echo', 'import ngraph_bridge'], stdout=outfile)

    # Setup the env flags
    import psutil
    num_cores = int(psutil.cpu_count(logical=False))
    print("OMP_NUM_THREADS: %s " % str(num_cores))

    os.environ['OMP_NUM_THREADS'] = str(num_cores)
    os.environ["KMP_AFFINITY"] = 'granularity=fine,compact,1,0'

    cmd = [
        'python',
        'tf_cnn_benchmarks.py',
        '--data_format',
        'NCHW',
        '--num_inter_threads',
        '2',
        '--freeze_when_forward_only=True',
        '--model=resnet50',
        '--batch_size=' + str(batch_size),
        '--num_batches',
        str(iterations),
    ]
    command_executor(cmd, verbose=True)

    os.chdir(root_pwd)
Code Example #13
def run_resnet50_infer_from_artifacts(artifact_dir, batch_size, iterations):
    root_pwd = os.getcwd()
    artifact_dir = os.path.abspath(artifact_dir)
    if not os.path.exists(artifact_dir):
        raise Exception("Can't find artifact dir: " + artifact_dir)
    if (len(glob.glob(artifact_dir + "/openvino_tensorflow-*.whl")) == 0):
        install_openvino_tensorflow(artifact_dir)

    # Check/download pretrained model
    pretrained_models_dir = os.path.abspath(
        os.path.join(root_pwd, '../pretrained_models'))
    if not os.path.exists(pretrained_models_dir):
        os.mkdir(pretrained_models_dir, 0o755)
    if not os.path.exists(pretrained_models_dir):
        raise AssertionError(
            "Could not find the path: {}".format(pretrained_models_dir))
    os.chdir(pretrained_models_dir)
    pretrained_model = os.path.join(pretrained_models_dir, 'resnet50_v1.pb')
    if not os.path.exists(pretrained_model):
        # wget https://zenodo.org/record/2535873/files/resnet50_v1.pb
        command_executor(
            ['wget', 'https://zenodo.org/record/2535873/files/resnet50_v1.pb'],
            verbose=True)
        if not os.path.exists(pretrained_model):
            raise Exception("Can't download pretrained model: " +
                            pretrained_model)
    else:
        print("Using existing pretrained model file: " + pretrained_model)

    # Setup the env flags
    import psutil
    num_cores = int(psutil.cpu_count(logical=False))
    print("OMP_NUM_THREADS: %s " % str(num_cores))
    os.environ['OMP_NUM_THREADS'] = str(num_cores)
    os.environ["KMP_AFFINITY"] = 'granularity=fine,compact,1,0'

    if not os.path.exists(root_pwd):
        raise AssertionError("Could not find the path")
    os.chdir(root_pwd)
    cmd = [
        'python',
        artifact_dir + '/test/python/test_rn50_infer.py',
        '--input-graph',
        pretrained_model,
        '--batch-size',
        str(batch_size),
        '--num-images',
        str(batch_size * iterations),
    ]
    command_executor(cmd, verbose=True)
    if not os.path.exists(root_pwd):
        raise AssertionError("Could not find the path")
    os.chdir(root_pwd)
Code Example #14
def install_ngraph_bridge(artifacts_dir):
    # Determine the ngraph whl
    ngtf_wheel_files = glob.glob(artifacts_dir +
                                 "/ngraph_tensorflow_bridge-*.whl")
    if (len(ngtf_wheel_files) != 1):
        print("Multiple Python whl files exist. Please remove old wheels")
        for whl in ngtf_wheel_files:
            print("Existing Wheel: " + whl)
        raise Exception("Error getting the ngraph-tf wheel file")

    ng_whl = os.path.join(artifacts_dir, ngtf_wheel_files[0])
    command_executor(["pip", "install", "-U", ng_whl])
Code Example #15
def run_tensorflow_pytests_from_artifacts(ngraph_tf_src_dir, tf_src_dir,
                                          xml_output):
    root_pwd = os.getcwd()

    ngraph_tf_src_dir = os.path.abspath(ngraph_tf_src_dir)

    patch_file = os.path.abspath(
        os.path.join(ngraph_tf_src_dir,
                     "test/python/tensorflow/tf_unittest_ngraph.patch"))

    # Next patch the TensorFlow so that the tests run using ngraph_bridge
    pwd = os.getcwd()

    # Go to the location of the TensorFlow install directory
    import tensorflow as tf
    tf_dir = tf.sysconfig.get_lib()
    os.chdir(os.path.join(tf_dir, '../'))
    print("CURRENT DIR: " + os.getcwd())

    print("Patching TensorFlow using: %s" % patch_file)
    result = call(["patch", "-p1", "-N", "-i", patch_file])
    print("Patch result: %d" % result)
    os.chdir(pwd)

    # Now run the TensorFlow python tests
    test_src_dir = os.path.join(ngraph_tf_src_dir, "test/python/tensorflow")
    test_script = os.path.join(test_src_dir, "tf_unittest_runner.py")
    test_manifest_file = os.path.join(test_src_dir, "python_tests_list.txt")
    test_xml_report = './junit_tensorflow_tests.xml'

    import psutil
    num_cores = int(psutil.cpu_count(logical=False))
    print("OMP_NUM_THREADS: %s " % str(num_cores))
    os.environ['OMP_NUM_THREADS'] = str(num_cores)
    os.environ['NGRAPH_TF_DISABLE_DEASSIGN_CLUSTERS'] = '1'

    cmd = [
        "python",
        test_script,
        "--tensorflow_path",
        tf_src_dir,
        "--run_tests_from_file",
        test_manifest_file,
    ]
    if xml_output:
        cmd.extend(["--xml_report", test_xml_report])
    command_executor(cmd)

    os.chdir(root_pwd)
Code Example #16
def install_openvino_tensorflow(artifacts_dir):
    ovtf_wheel_files = glob.glob(artifacts_dir + "/openvino_tensorflow*.whl")

    if (len(ovtf_wheel_files) != 1):
        print("Multiple Python whl files exist. Please remove old wheels")
        for whl in ovtf_wheel_files:
            print("Existing Wheel: " + whl)
        raise Exception("Error getting the openvino_tensorflow wheel file")

    if (platform.system() == 'Windows'):
        command_executor([
            "pip", "install", "-U", ovtf_wheel_files[0].replace("\\", "\\\\")
        ])
    else:
        ng_whl = os.path.join(artifacts_dir, ovtf_wheel_files[0])
        command_executor(["pip", "install", "-U", ng_whl])
Code Example #17
File: test_utils.py Project: iSwapna/ngraph-bridge
def run_resnet50_forward_pass(build_dir):

    root_pwd = os.getcwd()
    build_dir = os.path.abspath(build_dir)
    ngraph_tf_src_dir = os.path.abspath(build_dir + '/../')
    os.chdir(build_dir)

    call(['git', 'clone', 'https://github.com/tensorflow/benchmarks.git'])
    os.chdir('benchmarks')
    call(['git', 'checkout', '4c7b09ad87bbfc4b1f89650bcee40b3fc5e7dfed'])

    junit_script = os.path.abspath('%s/test/ci/junit-wrap.sh' % root_pwd)

    # Check to see if we need to patch the repo for Grappler
    # benchmark_cnn.patch will only work for the CPU backend
    patch_file = os.path.abspath(
        os.path.join(ngraph_tf_src_dir, "test/grappler/benchmark_cnn.patch"))
    import ngraph_bridge
    if ngraph_bridge.is_grappler_enabled():
        print("Patching repo using: %s" % patch_file)
        apply_patch(patch_file)

    os.chdir('scripts/tf_cnn_benchmarks/')
    # Update the script by adding `import ngraph_bridge`
    with open('convnet_builder.py', 'a') as outfile:
        call(['echo', 'import ngraph_bridge'], stdout=outfile)

    # Setup the env flags
    import psutil
    num_cores = int(psutil.cpu_count(logical=False))
    print("OMP_NUM_THREADS: %s " % str(num_cores))

    os.environ['OMP_NUM_THREADS'] = str(num_cores)
    os.environ["KMP_AFFINITY"] = 'granularity=fine,compact,1,0'

    os.environ['JUNIT_WRAP_FILE'] = "%s/junit_inference_test.xml" % build_dir
    os.environ['JUNIT_WRAP_SUITE'] = 'models'
    os.environ['JUNIT_WRAP_TEST'] = 'resnet50-inference'

    # Run inference job
    cmd = [
        junit_script, 'python', 'tf_cnn_benchmarks.py', '--data_format', 'NHWC',
        '--num_inter_threads', '2', '--freeze_when_forward_only=True',
        '--model=resnet50', '--batch_size=1', '--num_batches', '32'
    ]
    command_executor(cmd, verbose=True)
    os.chdir(root_pwd)
Code Example #18
    def test_MLP(self):
        cwd = os.getcwd()
        os.chdir('../model_level_tests/')
        grappler = ngraph_bridge.is_grappler_enabled()
        varopts = ngraph_bridge.are_variables_enabled()
        if grappler:
            if varopts:
                assert False, "Varopts and grappler do not build together right now"
            else:
                config = "grappler"
        else:
            config = "varopts" if varopts else "default"
        try:
            command_executor(
                "python test_main.py --run_basic_tests --models MLP --ignore_test time --configuration "
                + config)
        finally:
            os.chdir(cwd)
Code Example #19
def run_ngtf_gtests(build_dir, filters):
    root_pwd = os.getcwd()
    build_dir = os.path.abspath(build_dir)

    os.environ['GTEST_OUTPUT'] = 'xml:%s/xunit_gtest.xml' % build_dir

    if not os.path.isdir(build_dir):
        raise Exception("build directory doesn't exist: " + build_dir)

    # First run the C++ gtests
    os.chdir(os.path.join(build_dir, "test"))
    if filters is not None:
        gtest_filters = "--gtest_filter=" + filters
        cmd = ['./gtest_ngtf', gtest_filters]
    else:
        cmd = ['./gtest_ngtf']

    command_executor(cmd, verbose=True)

    os.chdir(root_pwd)
Code Example #20
def run_ngtf_pytests_from_artifacts(artifacts_dir):
    root_pwd = os.getcwd()

    artifacts_dir = os.path.abspath(artifacts_dir)
    install_ngraph_bridge(artifacts_dir)

    test_dir = os.path.join(artifacts_dir, "test")
    test_dir = os.path.join(test_dir, "python")

    if not os.path.isdir(test_dir):
        raise Exception("test directory doesn't exist: " + test_dir)

    # Change the directory to the test_dir
    os.chdir(test_dir)

    # Next run the ngraph-tensorflow python tests
    command_executor(["pip", "install", "-U", "pytest"])
    command_executor(["pip", "install", "-U", "psutil"])

    test_manifest_file = TestEnv.get_test_manifest_filename()
    # export the env-var for pytest to process manifest in conftest.py
    os.environ['NGRAPH_TF_TEST_MANIFEST'] = test_manifest_file

    command_executor([
        "python", "-m", "pytest",
        ('--junitxml=%s/xunit_pytest.xml' % artifacts_dir)
    ])

    os.chdir(root_pwd)
Code Example #21
def run_ngtf_pytests_from_artifacts(artifacts_dir):
    root_pwd = os.getcwd()

    artifacts_dir = os.path.abspath(artifacts_dir)
    install_ngraph_bridge(artifacts_dir)

    test_dir = os.path.join(artifacts_dir, "test")
    test_dir = os.path.join(test_dir, "python")

    if not os.path.isdir(test_dir):
        raise Exception("test directory doesn't exist: " + test_dir)

    # Change the directory to the test_dir
    os.chdir(test_dir)

    # Next run the ngraph-tensorflow python tests
    command_executor(["pip", "install", "-U", "pytest"])
    command_executor(["pip", "install", "-U", "psutil"])
    command_executor([
        "python", "-m", "pytest",
        ('--junitxml=%s/xunit_pytest.xml' % artifacts_dir),
        "--ignore=" + artifacts_dir + "/test/python/bfloat16"
    ])

    os.chdir(root_pwd)
Code Example #22
def run_ngtf_pytests(venv_dir, build_dir):
    root_pwd = os.getcwd()

    build_dir = os.path.abspath(build_dir)
    venv_dir = os.path.abspath(venv_dir)

    test_dir = os.path.join(build_dir, "test")
    test_dir = os.path.join(test_dir, "python")

    if not os.path.isdir(test_dir):
        raise Exception("test directory doesn't exist: " + test_dir)

    # Change the directory to the test_dir
    os.chdir(test_dir)

    # Load venv
    load_venv(venv_dir)

    # Next run the ngraph-tensorflow python tests
    command_executor([sys.executable, "-m", "pip", "install", "-U", "pytest"])
    command_executor([sys.executable, "-m", "pip", "install", "-U", "psutil"])
    command_executor([
        "python", "-m", "pytest",
        ('--junitxml=%s/xunit_pytest.xml' % build_dir)
    ],
                     verbose=True)

    os.chdir(root_pwd)
Code Example #23
def run_ngtf_pytests(venv_dir, build_dir):
    root_pwd = os.getcwd()

    build_dir = os.path.abspath(build_dir)
    venv_dir = os.path.abspath(venv_dir)
    mnist_dir = os.path.abspath(build_dir + '/examples/mnist/')
    axpy_dir = os.path.abspath(build_dir + '/examples/')
    test_dir = os.path.join(build_dir, "test")
    test_dir = os.path.join(test_dir, "python")

    if not os.path.isdir(test_dir):
        raise Exception("test directory doesn't exist: " + test_dir)

    # Change the directory to the test_dir
    os.chdir(test_dir)

    # Load venv
    load_venv(venv_dir)

    # Next run the ngraph-tensorflow python tests
    command_executor(["pip", "install", "-U", "pytest"])
    command_executor(["pip", "install", "-U", "psutil"])

    cmd = 'python -m pytest ' + (
        '--junitxml=%s/xunit_pytest.xml' %
        build_dir) + " --ignore=" + build_dir + "/test/python/bfloat16"
    env = os.environ.copy()
    new_paths = venv_dir + '/bin/python3:' + os.path.abspath(
        build_dir) + ":" + os.path.abspath(axpy_dir) + ":" + os.path.abspath(
            mnist_dir)
    if 'PYTHONPATH' in env:
        env["PYTHONPATH"] = new_paths + ":" + env["PYTHONPATH"]
    else:
        env["PYTHONPATH"] = new_paths
    ps = Popen(cmd, shell=True, env=env)
    so, se = ps.communicate()
    errcode = ps.returncode
    assert errcode == 0, "Error in running command: " + cmd
    os.chdir(root_pwd)
Code Example #24
def run_bazel_build():
    # Remember the current directory so we can return to it at the end
    root_pwd = os.getcwd()

    # Now run the configure
    command_executor(['bash', 'configure_bazel.sh'])

    # Build the cpp app - hello_tf
    command_executor(['bazel', 'build', 'hello_tf'])

    # Run the cpp app - hello_tf
    command_executor(['bazel-bin/hello_tf'])

    # Now build the bigger app
    command_executor(['bazel', 'build', 'infer_multiple_networks'])

    # Return to the original directory
    os.chdir(root_pwd)
Code Example #25
File: test_utils.py Project: mhbuehler/ngraph-bridge
def run_bazel_build_test(venv_dir, build_dir):
    # Load the virtual env
    venv_dir_absolute = load_venv(venv_dir)

    # Remember the current directory so we can return to it at the end
    root_pwd = os.getcwd()

    # Now run the configure
    command_executor(['bash', 'configure_bazel.sh'])

    # Build the cpp app - hello_tf
    command_executor(['bazel', 'build', 'hello_tf'])

    # Run the cpp app - hello_tf
    command_executor(['bazel-bin/hello_tf'])

    # Now build the bigger app
    command_executor(['bazel', 'build', 'infer_multi'])

    # Return to the original directory
    os.chdir(root_pwd)
Code Example #26
def run_ovtf_pytests_from_artifacts(artifacts_dir):
    root_pwd = os.getcwd()

    artifacts_dir = os.path.abspath(artifacts_dir)
    install_openvino_tensorflow(artifacts_dir)

    test_dir = os.path.join(artifacts_dir, "test")
    test_dir = os.path.join(test_dir, "python")

    if not os.path.isdir(test_dir):
        raise Exception("test directory doesn't exist: " + test_dir)

    # Change the directory to the test_dir
    if not os.path.exists(test_dir):
        raise AssertionError("Could not find directory: {}".format(test_dir))
    os.chdir(test_dir)

    # Next run the ngraph-tensorflow python tests
    command_executor(["pip", "install", "-U", "pytest"])
    command_executor(["pip", "install", "-U", "psutil"])

    test_manifest_file = TestEnv.get_test_manifest_filename()
    # export the env-var for pytest to process manifest in conftest.py
    os.environ['OPENVINO_TF_TEST_MANIFEST'] = test_manifest_file
    if (platform.system() == "Windows"):
        command_executor([
            sys.executable.replace("\\", "\\\\"), "-m", "pytest",
            ('--junitxml=%s\\xunit_pytest.xml' % artifacts_dir).replace(
                "\\", "\\\\")
        ])
    else:
        command_executor([
            sys.executable, "-m", "pytest",
            ('--junitxml=%s/xunit_pytest.xml' % artifacts_dir)
        ])

    if not os.path.exists(root_pwd):
        raise AssertionError("Could not find directory")
    os.chdir(root_pwd)
Code Example #27
def run_bazel_build():
    # Remember the current directory so we can return to it at the end
    root_pwd = os.getcwd()

    # Now run the configure
    command_executor(['bash', 'configure_bazel.sh'])

    # Build the bridge
    command_executor(['bazel', 'build', 'libngraph_bridge.so'])

    # Build the backend
    command_executor(['bazel', 'build', '@ngraph//:libinterpreter_backend.so'])

    # Return to the original directory
    os.chdir(root_pwd)
Code Example #28
def run_cpp_example_test(build_dir):

    root_pwd = os.getcwd()
    build_dir = os.path.abspath(build_dir)
    os.chdir(build_dir)

    # Create the example workspace directory and chdir there
    path = 'cpp_example'
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        if exc.errno == errno.EEXIST and os.path.isdir(path):
            pass
    os.chdir(path)

    # Copy the files
    files = [
        '../../examples/tf_cpp_examples/hello_tf.cpp',
        '../../examples/tf_cpp_examples/Makefile'
    ]
    command_executor(['cp', files[0], './'])
    command_executor(['cp', files[1], './'])

    # Now execute Make
    command_executor(['make'])

    # Now run the hello_tf example
    # First setup the LD_LIB_PATH
    if (platform.system() == 'Darwin'):
        ld_path_name = 'DYLD_LIBRARY_PATH'
    else:
        ld_path_name = 'LD_LIBRARY_PATH'

    os.environ[ld_path_name] = '../artifacts/lib:../artifacts/tensorflow'
    command_executor('./hello_tf')

    # Return to the original directory
    os.chdir(root_pwd)
Code Example #29
def run_resnet50_from_artifacts(ngraph_tf_src_dir, artifact_dir, batch_size,
                                iterations):

    root_pwd = os.getcwd()
    artifact_dir = os.path.abspath(artifact_dir)
    ngraph_tf_src_dir = os.path.abspath(ngraph_tf_src_dir)
    install_ngraph_bridge(artifact_dir)

    # Now clone the repo and proceed
    call(['git', 'clone', 'https://github.com/tensorflow/benchmarks.git'])
    os.chdir('benchmarks')
    call(['git', 'checkout', '4c7b09ad87bbfc4b1f89650bcee40b3fc5e7dfed'])

    # Check to see if we need to patch the repo for Grappler
    patch_file = os.path.abspath(
        os.path.join(ngraph_tf_src_dir, "test/grappler/benchmark_cnn.patch"))
    import ngraph_bridge
    if ngraph_bridge.is_grappler_enabled():
        print("Patching repo using: %s" % patch_file)
        apply_patch(patch_file)

    os.chdir('scripts/tf_cnn_benchmarks/')

    # junit_script = os.path.abspath('%s/test/ci/junit-wrap.sh' % root_pwd)

    # Update the script by adding `import ngraph_bridge`
    with open('convnet_builder.py', 'a') as outfile:
        call(['echo', 'import ngraph_bridge'], stdout=outfile)

    # Setup the env flags
    import psutil
    num_cores = int(psutil.cpu_count(logical=False))
    print("OMP_NUM_THREADS: %s " % str(num_cores))

    os.environ['OMP_NUM_THREADS'] = str(num_cores)
    os.environ["KMP_AFFINITY"] = 'granularity=fine,compact,1,0'

    # Delete the temporary model save directory
    model_save_dir = os.getcwd() + '/modelsavepath'
    if os.path.exists(model_save_dir) and os.path.isdir(model_save_dir):
        shutil.rmtree(model_save_dir)

    eval_eventlog_dir = os.getcwd() + '/eval_eventlog_dir'
    if os.path.exists(eval_eventlog_dir) and os.path.isdir(eval_eventlog_dir):
        shutil.rmtree(eval_eventlog_dir)

    # os.environ['JUNIT_WRAP_FILE'] = "%s/junit_training_test.xml" % build_dir
    # os.environ['JUNIT_WRAP_SUITE'] = 'models'
    # os.environ['JUNIT_WRAP_TEST'] = 'resnet50-training'

    # Run training job
    # cmd = [
    #     junit_script, 'python', 'tf_cnn_benchmarks.py', '--data_format',
    #     'NCHW', '--num_inter_threads', '1', '--train_dir=' + model_save_dir,
    #     '--num_batches', '10', '--model=resnet50', '--batch_size=128'
    # ]

    cmd = [
        'python', 'tf_cnn_benchmarks.py', '--data_format', 'NCHW',
        '--num_inter_threads', '1', '--train_dir=' + model_save_dir,
        '--num_batches',
        str(iterations), '--model=resnet50', '--batch_size=' + str(batch_size),
        '--eval_dir=' + eval_eventlog_dir
    ]
    command_executor(cmd, verbose=True)

    # os.environ['JUNIT_WRAP_FILE'] = "%s/junit_inference_test.xml" % build_dir
    # os.environ['JUNIT_WRAP_SUITE'] = 'models'
    # os.environ['JUNIT_WRAP_TEST'] = 'resnet50-inference'

    # Run inference job
    # cmd = [
    #     junit_script, 'python', 'tf_cnn_benchmarks.py', '--data_format',
    #     'NCHW', '--num_inter_threads', '1', '--train_dir=' + model_save_dir,
    #     '--model=resnet50', '--batch_size=128', '--num_batches', '10', '--eval'
    # ]
    cmd = [
        'python', 'tf_cnn_benchmarks.py', '--data_format', 'NCHW',
        '--num_inter_threads', '1', '--train_dir=' + model_save_dir,
        '--model=resnet50', '--batch_size=' + str(batch_size), '--num_batches',
        str(iterations), '--eval', '--eval_dir=' + eval_eventlog_dir
    ]
    command_executor(cmd, verbose=True)

    os.chdir(root_pwd)
Code Example #30
def run_resnet50_from_artifacts(openvino_tf_src_dir, artifact_dir, batch_size,
                                iterations):
    root_pwd = os.getcwd()
    artifact_dir = os.path.abspath(artifact_dir)
    openvino_tf_src_dir = os.path.abspath(openvino_tf_src_dir)
    install_openvino_tensorflow(artifact_dir)

    # Now clone the repo and proceed
    subprocess.Popen(
        shlex.split('git clone https://github.com/tensorflow/benchmarks.git'))
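    # Note: Popen does not wait for the clone to finish; the existence check
    # below assumes the 'benchmarks' directory is already in place.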
    if not os.path.exists('benchmarks'):
        raise AssertionError(
            "Could not find directory: {}".format('benchmarks'))
    os.chdir('benchmarks')
    subprocess.Popen(
        shlex.split('git checkout aef6daa90a467a1fc7ce8395cd0067e5fda1ecff'))

    # Check to see if we need to patch the repo for Grappler
    # benchmark_cnn.patch will only work for the CPU backend
    patch_file = os.path.abspath(
        os.path.join(openvino_tf_src_dir, "test/grappler/benchmark_cnn.patch"))
    import openvino_tensorflow
    if openvino_tensorflow.is_grappler_enabled():
        print("Patching repo using: %s" % patch_file)
        apply_patch(patch_file)
    if not os.path.exists('scripts/tf_cnn_benchmarks/'):
        raise AssertionError("Could not find directory: {}".format(
            'scripts/tf_cnn_benchmarks/'))
    os.chdir('scripts/tf_cnn_benchmarks/')

    # junit_script = os.path.abspath('%s/test/ci/junit-wrap.sh' % root_pwd)

    # Update the script by adding `import openvino_tensorflow`
    with open('convnet_builder.py', 'a') as outfile:
        subprocess.Popen(shlex.split('echo import openvino_tensorflow'),
                         stdout=outfile)

    # Setup the env flags
    import psutil
    num_cores = int(psutil.cpu_count(logical=False))
    print("OMP_NUM_THREADS: %s " % str(num_cores))

    os.environ['OMP_NUM_THREADS'] = str(num_cores)
    os.environ["KMP_AFFINITY"] = 'granularity=fine,compact,1,0'

    # Delete the temporary model save directory
    model_save_dir = os.getcwd() + '/modelsavepath'
    if os.path.exists(model_save_dir) and os.path.isdir(model_save_dir):
        shutil.rmtree(model_save_dir)

    eval_eventlog_dir = os.getcwd() + '/eval_eventlog_dir'
    if os.path.exists(eval_eventlog_dir) and os.path.isdir(eval_eventlog_dir):
        shutil.rmtree(eval_eventlog_dir)

    cmd = [
        'python', 'tf_cnn_benchmarks.py', '--data_format', 'NHWC',
        '--num_inter_threads', '1', '--train_dir=' + model_save_dir,
        '--num_batches',
        str(iterations), '--model=resnet50', '--batch_size=' + str(batch_size),
        '--eval_dir=' + eval_eventlog_dir
    ]
    command_executor(cmd, verbose=True)
    cmd = [
        'python', 'tf_cnn_benchmarks.py', '--data_format', 'NHWC',
        '--num_inter_threads', '1', '--train_dir=' + model_save_dir,
        '--model=resnet50', '--batch_size=' + str(batch_size), '--num_batches',
        str(iterations), '--eval', '--eval_dir=' + eval_eventlog_dir
    ]
    # Run the eval job; note that it currently fails with TF2.0
    command_executor(cmd, verbose=True)

    if not os.path.exists(root_pwd):
        raise AssertionError("Could not find the path")
    os.chdir(root_pwd)