def __init__(self, *args, **kwargs):
    super().__init__(
        name="benchmark://example-v0",
        license="MIT",
        description="An example dataset",
    )
    self._benchmarks = {
        "/foo": Benchmark.from_file_contents(
            "benchmark://example-v0/foo", "Ir data".encode("utf-8")
        ),
        "/bar": Benchmark.from_file_contents(
            "benchmark://example-v0/bar", "Ir data".encode("utf-8")
        ),
    }
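For context, here is a minimal usage sketch. It assumes this __init__ belongs to a Dataset subclass (call it ExampleDataset, a made-up name) that also implements benchmark_uris() and benchmark() over self._benchmarks, since the rest of the class is not shown:

# Hypothetical sketch: ExampleDataset is an assumed class name, and the
# placeholder "Ir data" bytes above are not valid LLVM bitcode, so these
# benchmarks can be enumerated but not actually compiled by an environment.
dataset = ExampleDataset()
for uri in dataset.benchmark_uris():
    print(uri)  # benchmark://example-v0/foo, benchmark://example-v0/bar
benchmark = dataset.benchmark("benchmark://example-v0/foo")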
Example #2
    def benchmark_from_seed(self, seed: int) -> Benchmark:
        """Get a benchmark from a uint32 seed.

        :param seed: A number in the range 0 <= n < 2^32.

        :return: A benchmark instance.
        """
        self.install()

        # Run llvm-stress with the given seed and pipe the output to llvm-as to
        # assemble a bitcode.
        try:
            with Popen(
                [str(llvm.llvm_stress_path()), f"--seed={seed}"],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            ) as llvm_stress:
                with Popen(
                    [str(llvm.llvm_as_path()), "-"],
                    stdin=llvm_stress.stdout,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                ) as llvm_as:
                    stdout, _ = llvm_as.communicate(timeout=60)
                    llvm_stress.communicate(timeout=60)
                    if llvm_stress.returncode or llvm_as.returncode:
                        raise BenchmarkInitError(
                            "Failed to generate benchmark")
        except subprocess.TimeoutExpired:
            raise BenchmarkInitError("Benchmark generation timed out")

        return Benchmark.from_file_contents(f"{self.name}/{seed}", stdout)
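A short usage sketch for the generator method above; the dataset name "generator://llvm-stress-v0" is an assumption about which dataset this method belongs to:

# Hedged sketch: assumes the enclosing dataset is registered as
# "generator://llvm-stress-v0" and exposes benchmark_from_seed().
import compiler_gym

env = compiler_gym.make("llvm-v0")
dataset = env.datasets["generator://llvm-stress-v0"]
benchmark = dataset.benchmark_from_seed(seed=0xCAFE)  # any 0 <= seed < 2**32
env.reset(benchmark=benchmark)
env.close()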
Example #3
def test_invalid_benchmark_data(env: LlvmEnv):
    benchmark = Benchmark.from_file_contents("benchmark://new",
                                             "Invalid bitcode".encode("utf-8"))

    with pytest.raises(
            ValueError,
            match='Failed to parse LLVM bitcode: "benchmark://new"'):
        env.reset(benchmark=benchmark)
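For contrast, a benchmark built from real source code parses cleanly. A minimal sketch using the make_benchmark helper documented in the last example below (my_app.c is a placeholder path, not part of the test):

# Hedged sketch: "my_app.c" is an assumed local source file.
import compiler_gym
from compiler_gym.envs.llvm import make_benchmark

env = compiler_gym.make("llvm-v0")
benchmark = make_benchmark("my_app.c")  # compiled to bitcode with clang
env.reset(benchmark=benchmark)          # no parse error, unlike the raw bytes above
env.close()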
Example #4
    def __init__(self, *args, **kwargs):
        super().__init__(
            name="benchmark://unrolling-v0",
            license="MIT",
            description="Unrolling example dataset",
        )

        self._benchmarks = {
            "/offsets1":
            Benchmark.from_file_contents(
                "benchmark://unrolling-v0/offsets1",
                self.preprocess(BENCHMARKS_PATH / "offsets1.c"),
            ),
            "/conv2d":
            Benchmark.from_file_contents(
                "benchmark://unrolling-v0/conv2d",
                self.preprocess(BENCHMARKS_PATH / "conv2d.c"),
            ),
        }
Example #5
    def __init__(self, *args, **kwargs):
        super().__init__(
            name="benchmark://unrolling-v0",
            license="MIT",
            description="Unrolling example dataset",
            site_data_base=site_data_path(
                "example_dataset"
            ),  # TODO: what should we set this to? we are not using it
        )

        self._benchmarks = {
            "/offsets1": Benchmark.from_file_contents(
                "benchmark://unrolling-v0/offsets1",
                self.preprocess(BENCHMARKS_PATH / "offsets1.c"),
            ),
            "/conv2d": Benchmark.from_file_contents(
                "benchmark://unrolling-v0/conv2d",
                self.preprocess(BENCHMARKS_PATH / "conv2d.c"),
            ),
        }
Example #6
    def __init__(self, *args, **kwargs):
        super().__init__(
            name="benchmark://loops-opt-v0",
            license="MIT",
            description="Loops optimization dataset",
        )

        self._benchmarks = {
            "benchmark://loops-opt-v0/add": Benchmark.from_file_contents(
                "benchmark://loops-opt-v0/add",
                self.preprocess(BENCHMARKS_PATH / "add.c"),
            ),
            "benchmark://loops-opt-v0/offsets1": Benchmark.from_file_contents(
                "benchmark://loops-opt-v0/offsets1",
                self.preprocess(BENCHMARKS_PATH / "offsets1.c"),
            ),
            "benchmark://loops-opt-v0/conv2d": Benchmark.from_file_contents(
                "benchmark://loops-opt-v0/conv2d",
                self.preprocess(BENCHMARKS_PATH / "conv2d.c"),
            ),
        }
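A brief usage sketch for a dataset like this one, assuming benchmark://loops-opt-v0 has been registered with the environment (the registration call is not part of the snippet):

# Hypothetical: reset the environment on each of the three benchmarks
# defined above and print an instruction-count observation.
import compiler_gym

env = compiler_gym.make("llvm-v0")
for name in ("add", "offsets1", "conv2d"):
    env.reset(benchmark=f"benchmark://loops-opt-v0/{name}")
    print(env.benchmark, env.observation["IrInstructionCount"])
env.close()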
Example #7
def make_benchmark(
    inputs: Union[str, Path, ClangInvocation, List[Union[str, Path, ClangInvocation]]],
    copt: Optional[List[str]] = None,
    system_includes: bool = True,
    timeout: int = 600,
) -> Benchmark:
    """Create a benchmark for use by LLVM environments.

    This function takes one or more inputs and uses them to create an LLVM
    bitcode benchmark that can be passed to
    :meth:`compiler_gym.envs.LlvmEnv.reset`.

    The following input types are supported:

    +-----------------------------------------------------+---------------------+-------------------------------------------------------------+
    | **File Suffix**                                     | **Treated as**      | **Converted using**                                         |
    +-----------------------------------------------------+---------------------+-------------------------------------------------------------+
    | :code:`.bc`                                         | LLVM IR bitcode     | No conversion required.                                     |
    +-----------------------------------------------------+---------------------+-------------------------------------------------------------+
    | :code:`.ll`                                         | LLVM IR text format | Assembled to bitcode using llvm-as.                         |
    +-----------------------------------------------------+---------------------+-------------------------------------------------------------+
    | :code:`.c`, :code:`.cc`, :code:`.cpp`, :code:`.cxx` | C / C++ source      | Compiled to bitcode using clang and the given :code:`copt`. |
    +-----------------------------------------------------+---------------------+-------------------------------------------------------------+

    .. note::

        The LLVM IR format has no compatibility guarantees between versions (see
        `LLVM docs
        <https://llvm.org/docs/DeveloperPolicy.html#ir-backwards-compatibility>`_).
        You must ensure that any :code:`.bc` and :code:`.ll` files are
        compatible with the LLVM version used by CompilerGym, which can be
        reported using :func:`env.compiler_version
        <compiler_gym.envs.CompilerEnv.compiler_version>`.

    E.g. for single-source C/C++ programs, you can pass the path of the source
    file:

        >>> benchmark = make_benchmark('my_app.c')
        >>> env = gym.make("llvm-v0")
        >>> env.reset(benchmark=benchmark)

    The clang invocation used is roughly equivalent to:

    .. code-block::

        $ clang my_app.c -O0 -c -emit-llvm -o benchmark.bc

    Additional compile-time arguments to clang can be provided using the
    :code:`copt` argument:

        >>> benchmark = make_benchmark('/path/to/my_app.cpp', copt=['-O2'])

    If you need more fine-grained control over the options, you can directly
    construct a :class:`ClangInvocation
    <compiler_gym.envs.llvm.ClangInvocation>` to pass a list of arguments to
    clang:

        >>> benchmark = make_benchmark(
        ...     ClangInvocation(['/path/to/my_app.c'], system_includes=False, timeout=10)
        ... )

    For multi-file programs, pass a list of inputs that will be compiled
    separately and then linked to a single module:

        >>> benchmark = make_benchmark([
        ...     'main.c',
        ...     'lib.cpp',
        ...     'lib2.bc',
        ...     'foo/input.bc',
        ... ])

    :param inputs: An input, or list of inputs.

    :param copt: A list of command line options to pass to clang when compiling
        source files.

    :param system_includes: Whether to include the system standard libraries
        during compilation jobs. This requires a system toolchain. See
        :func:`get_system_library_flags`.

    :param timeout: The maximum number of seconds to allow clang to run before
        terminating.

    :return: A :code:`Benchmark` instance.

    :raises FileNotFoundError: If any input sources are not found.

    :raises TypeError: If the inputs are of unsupported types.

    :raises OSError: If a suitable compiler cannot be found.

    :raises BenchmarkInitError: If a compilation job fails.

    :raises TimeoutExpired: If a compilation job exceeds :code:`timeout`
        seconds.
    """
    copt = copt or []

    bitcodes: List[Path] = []
    clang_jobs: List[ClangInvocation] = []
    ll_paths: List[Path] = []

    def _add_path(path: Path):
        if not path.is_file():
            raise FileNotFoundError(path)

        if path.suffix == ".bc":
            bitcodes.append(path.absolute())
        elif path.suffix in {".c", ".cc", ".cpp", ".cxx"}:
            clang_jobs.append(
                ClangInvocation.from_c_file(
                    path, copt=copt, system_includes=system_includes, timeout=timeout
                )
            )
        elif path.suffix == ".ll":
            ll_paths.append(path)
        else:
            raise ValueError(f"Unrecognized file type: {path.name}")

    # Determine from inputs the list of pre-compiled bitcodes and the clang
    # invocations required to compile the bitcodes.
    if isinstance(inputs, str) or isinstance(inputs, Path):
        _add_path(Path(inputs))
    elif isinstance(inputs, ClangInvocation):
        clang_jobs.append(inputs)
    else:
        for input in inputs:
            if isinstance(input, str) or isinstance(input, Path):
                _add_path(Path(input))
            elif isinstance(input, ClangInvocation):
                clang_jobs.append(input)
            else:
                raise TypeError(f"Invalid input type: {type(input).__name__}")

    # Shortcut if we only have a single pre-compiled bitcode.
    if len(bitcodes) == 1 and not clang_jobs and not ll_paths:
        bitcode = bitcodes[0]
        return Benchmark.from_file(uri=f"benchmark://file-v0{bitcode}", path=bitcode)

    tmpdir_root = transient_cache_path(".")
    tmpdir_root.mkdir(exist_ok=True, parents=True)
    with tempfile.TemporaryDirectory(
        dir=tmpdir_root, prefix="llvm-make_benchmark-"
    ) as d:
        working_dir = Path(d)

        clang_outs = [
            working_dir / f"clang-out-{i}.bc" for i in range(1, len(clang_jobs) + 1)
        ]
        llvm_as_outs = [
            working_dir / f"llvm-as-out-{i}.bc" for i in range(1, len(ll_paths) + 1)
        ]

        # Run the clang and llvm-as invocations in parallel. Avoid running this
        # code path if possible as get_thread_pool_executor() requires locking.
        if clang_jobs or ll_paths:
            llvm_as_path = str(llvm.llvm_as_path())
            executor = get_thread_pool_executor()

            llvm_as_commands = [
                [llvm_as_path, str(ll_path), "-o", bc_path]
                for ll_path, bc_path in zip(ll_paths, llvm_as_outs)
            ]

            # Fire off the clang and llvm-as jobs.
            futures = [
                executor.submit(run_command, job.command(out), job.timeout)
                for job, out in zip(clang_jobs, clang_outs)
            ] + [
                executor.submit(run_command, command, timeout)
                for command in llvm_as_commands
            ]

            # Block until finished.
            list(future.result() for future in as_completed(futures))

            # Check that the expected files were generated.
            for clang_job, bc_path in zip(clang_jobs, clang_outs):
                if not bc_path.is_file():
                    raise BenchmarkInitError(
                        f"clang failed: {' '.join(clang_job.command(bc_path))}"
                    )
            for command, bc_path in zip(llvm_as_commands, llvm_as_outs):
                if not bc_path.is_file():
                    raise BenchmarkInitError(f"llvm-as failed: {command}")

        all_outs = bitcodes + clang_outs + llvm_as_outs
        if not all_outs:
            raise ValueError("No inputs")
        elif len(all_outs) == 1:
            # We only have a single bitcode so read it.
            with open(str(all_outs[0]), "rb") as f:
                bitcode = f.read()
        else:
            # Link all of the bitcodes into a single module.
            # Include the llvm-as outputs so that .ll inputs are not dropped.
            llvm_link_cmd = [str(llvm.llvm_link_path()), "-o", "-"] + [
                str(path) for path in all_outs
            ]
            with Popen(
                llvm_link_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
            ) as llvm_link:
                bitcode, stderr = llvm_link.communicate(timeout=timeout)
                if llvm_link.returncode:
                    raise BenchmarkInitError(
                        f"Failed to link LLVM bitcodes with error: {stderr.decode('utf-8')}"
                    )

    timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
    uri = f"benchmark://user-v0/{timestamp}-{random.randrange(16**4):04x}"
    return Benchmark.from_file_contents(uri, bitcode)