Example 1
0
def _run_test(test_dict):
    """Executes a single test described by a test dict.

    Args:
      test_dict: A dict with the keys:
        "tf_module_builder": zero-arg callable returning a tf.Module (required).
        "passes": optional pass pipeline to run on the imported module.
        "expect_pass_failure": if truthy, a pass-pipeline failure is expected
          and no diagnostic ASM dump is printed before re-raising.
        "print_input_module": if truthy, print the module ASM to stdout.
    """
    build_tf_module = test_dict["tf_module_builder"]
    module = compiler.tf_module_to_compiler_module(build_tf_module(),
                                                   pass_pipeline=())

    pass_pipeline = test_dict.get("passes")
    if pass_pipeline:
        try:
            module.run_pass_pipeline(pass_pipeline)
        except:  # pylint: disable=bare-except
            # Only dump the intermediate ASM when the failure is unexpected;
            # the exception is re-raised either way.
            if not test_dict.get("expect_pass_failure"):
                print(
                    "UNEXPECTED PASS FAILURE (INTERMEDIATE ASM FOLLOWS ON STDERR):",
                    file=sys.stderr)
                print(module.to_asm(), file=sys.stderr)
            raise

    # Optionally print the input module ASM for inspection.
    if test_dict.get("print_input_module"):
        print(module.to_asm())
Example 2
0
 def _compile_module(module, backend_info, exported_names, artifacts_dir):
     """Imports `module` into compiler IR and incrementally lowers it.

     Args:
       module: the tf.Module to compile.
       backend_info: BackendInfo describing the target backend.
       exported_names: iterable of function names to expose for compilation.
       artifacts_dir: optional directory where compilation artifacts are saved.

     Returns:
       The result of incrementally lowering the imported compiler module.
     """
     imported = compiler.tf_module_to_compiler_module(module,
                                                      exported_names,
                                                      pass_pipeline=())
     return _incrementally_lower_compiler_module(imported, backend_info,
                                                 artifacts_dir)
Example 3
0
def compile_tf_module(
    tf_module: Type[tf.Module],
    backend_infos: Sequence["BackendInfo"] = (),
    exported_names: Sequence[str] = (),
    artifacts_dir: Union[str, None] = None
) -> Tuple[compiler.binding.OpaqueBlob, Union[str, None]]:
    """Compiles a TensorFlow tf.Module and optionally saves compilation artifacts.

    The artifact this creates is not callable. See IreeCompiledModule for an API
    that returns a module that can be called without any further steps.

    If artifacts_dir is provided then the following artifacts will be saved:
      backend_name/saved_model:
        A TF SavedModel directory containing the files used to translate the
        tf.Module into an IREE module. Only saved if '--keep_saved_model=True'.
      tf_input.mlir:
        MLIR for the module in TF's input dialect.
      iree_input.mlir:
        The MLIR above translated to IREE via compiler.TF_IMPORT_PASS_PIPELINE.
      backend_name/compiled.vmfb:
        A VM FlatBuffer compiled to the target backends from the IREE MLIR above.

    If multiple backends are specified, then instead of saving the SavedModel and
    compiled 'vmfb' under 'backend_name/', they will be saved as follows:
      - 'saved_model__{backends}'
      - 'compiled__{backends}.vmfb'
    where 'backends' is a '__' delimited list (e.g. iree_vmla__iree_llvmjit).

    Args:
      tf_module: A tf.Module.
      backend_infos: Iterable of BackendInfo names to compile for.
      exported_names: Iterable of dotted function names to consider for
        compilation.
      artifacts_dir: An optional string pointing to where compilation artifacts
        should be saved.

    Returns:
      A compiled IREE module blob and the path to the compiled VM FlatBuffer if
      artifacts_dir is provided.
    """

    if artifacts_dir is not None and FLAGS.keep_saved_model:
        # Create a saved model for these target backends to avoid a race condition
        # when running a test suite.
        # TODO(meadowlark): Remove this once we have a TfLiteCompiledModule.
        sm_path = _get_backends_path("saved_model", backend_infos,
                                     artifacts_dir)
    else:
        sm_path = None

    if artifacts_dir is not None:
        # Set up a crash reproducer for debugging.
        # NOTE(review): the reproducer path is only reset to None on failure
        # below, so a successful run leaves it pointing at this call's path —
        # confirm whether later compilations should inherit it.
        backends_string = backends_to_str(backend_infos)
        compiler.Context.default_crash_reproducer_path = os.path.join(
            artifacts_dir, f"reproducer__{backends_string}.mlir")

    try:
        # Convert the tf_module into raw TF input MLIR. pass_pipeline=() defers
        # the import passes so the raw TF dialect can be saved first.
        compiler_module = compiler.tf_module_to_compiler_module(
            tf_module, exported_names, sm_path, pass_pipeline=())

        if artifacts_dir is not None:
            tf_mlir_path = os.path.join(artifacts_dir, "tf_input.mlir")
            logging.info("Saving raw TF input MLIR to: %s", tf_mlir_path)
            with open(tf_mlir_path, "w") as f:
                f.write(compiler_module.to_asm())

        # Now run the passes manually that tf_module_to_compiler_module would
        # usually do.
        compiler_module.run_pass_pipeline(compiler.TF_IMPORT_PASS_PIPELINE)

        if artifacts_dir is not None:
            iree_mlir_path = os.path.join(artifacts_dir, "iree_input.mlir")
            logging.info("Saving IREE input MLIR to: %s", iree_mlir_path)
            with open(iree_mlir_path, "w") as f:
                f.write(compiler_module.to_asm())

        # Collect the compiler targets across all requested backends and compile
        # a single module covering all of them.
        target_backends = []
        for backend_info in backend_infos:
            target_backends.extend(backend_info.compiler_targets)
        compiled_module = compiler_module.compile(
            target_backends=target_backends)

        compiled_path = None
        if artifacts_dir is not None:
            compiled_path = _get_backends_path("compiled", backend_infos,
                                               artifacts_dir)
            compiled_path = f"{compiled_path}.vmfb"
            logging.info("Saving compiled IREE module to: %s", compiled_path)
            # The FlatBuffer is binary data, hence "wb".
            with open(compiled_path, "wb") as f:
                f.write(compiled_module)

    except Exception:  # pylint: disable=broad-except
        if artifacts_dir is not None:
            # Disable the crash reproducer (to avoid inadvertently overwriting it).
            compiler.Context.default_crash_reproducer_path = None
        raise

    return compiled_module, compiled_path