Example #1
0
 def f(inp, _):
     """Extract FuTIL output from the input and wrap it as a File source.

     Temp-dir inputs carry the path in `data.name`; plain inputs carry it
     directly in `data`.
     """
     if inp.source_type == SourceType.TmpDir:
         extracted = futil_extract(Path(inp.data.name))
     else:
         extracted = futil_extract(Path(inp.data))
     stream = BytesIO(extracted.encode("UTF-8"))
     return (Source(stream, SourceType.File), None, 0)
Example #2
0
 def f(_inp, ctx):
     """Build a JSON report of simulated cycle count plus memory contents.

     Reads the simulator's stdout stream from `_inp`, extracts the cycle
     count, converts the `.dat` memories in `ctx["tmpdir"]` to JSON, and
     returns the combined report as a File source.
     """
     # Simulator output looks like: "Simulated 91 cycles"
     r = re.search(r"Simulated (\d+) cycles",
                   _inp.data.read().decode("ascii"))
     data = {
         # Guard against a missing match instead of crashing on r.group;
         # sibling stages default the cycle count to 0 in this case.
         "cycles": int(r.group(1)) if r is not None else 0,
         "memories": convert2json(ctx["tmpdir"], "out"),
     }
     buf = BytesIO(
         json.dumps(data, indent=2, sort_keys=True).encode("UTF-8"))
     return (Source(buf, SourceType.File), None, 0)
Example #3
0
    def _define_steps(self, input_data, builder, config):
        """
        Define and schedule the Icarus Verilog simulation pipeline:
        make a temp dir, optionally convert JSON input data to `.dat`
        files, compile with iverilog, simulate, extract either the VCD
        trace or a JSON result, and clean up the temp dir.

        Returns the scheduled result source (VCD stream when
        `self.is_vcd`, otherwise a JSON output stream).
        """
        testbench = config["stages", self.name, "testbench"]
        data_path = config.get(("stages", "verilog", "data"))
        cmd = config["stages", self.name, "exec"]

        # Step 1: Make a new temporary directory
        @builder.step()
        def mktmp() -> SourceType.Directory:
            """
            Make temporary directory to store Verilator build files.
            """
            return TmpDir()

        # Step 2a: check if we need verilog.data to be passed
        @builder.step()
        def check_verilog_for_mem_read(verilog_src: SourceType.String):
            """
            Read input verilog to see if `icarus-verilog.data` needs to be set.
            """
            if "readmemh" in verilog_src:
                raise errors.MissingDynamicConfiguration("verilog.data")

        # Step 2: Transform data from JSON to Dat.
        @builder.step()
        def json_to_dat(tmp_dir: SourceType.Directory, json_path: SourceType.Stream):
            """
            Converts a `json` data format into a series of `.dat` files.
            """
            round_float_to_fixed = config["stages", self.name, "round_float_to_fixed"]
            convert2dat(
                tmp_dir.name,
                sjson.load(json_path, use_decimal=True),
                "dat",
                round_float_to_fixed,
            )

        # Step 3: compile with iverilog; {exec_path} and {input_path} are
        # filled in per-invocation below.
        cmd = " ".join(
            [
                cmd,
                "-g2012",
                "-o",
                "{exec_path}",
                testbench,
                "{input_path}",
            ]
        )

        @builder.step(description=cmd)
        def compile_with_iverilog(
            input_path: SourceType.Path, tmpdir: SourceType.Directory
        ) -> SourceType.Stream:
            """
            Compile the input verilog program with iverilog.
            """
            return shell(
                cmd.format(
                    input_path=str(input_path),
                    exec_path=f"{tmpdir.name}/{self.object_name}",
                ),
                stdout_as_debug=True,
            )

        # Step 4: simulate
        @builder.step()
        def simulate(tmpdir: SourceType.Directory) -> SourceType.Stream:
            """
            Simulates compiled icarus verilog program.
            """
            cycle_limit = config["stages", "verilog", "cycle_limit"]
            return shell(
                [
                    f"{tmpdir.name}/{self.object_name}",
                    f"+DATA={tmpdir.name}",
                    f"+CYCLE_LIMIT={str(cycle_limit)}",
                    f"+OUT={tmpdir.name}/output.vcd",
                    f"+NOTRACE={0 if self.is_vcd else 1}",
                ]
            )

        # Step 5(self.is_vcd == True): extract
        @builder.step()
        def output_vcd(tmpdir: SourceType.Directory) -> SourceType.Stream:
            """
            Return the generated `output.vcd`.
            """
            # return stream instead of path because tmpdir gets deleted
            # before the next stage runs
            return (Path(tmpdir.name) / "output.vcd").open("rb")

        # Step 5(self.is_vcd == False): extract cycles + data
        @builder.step()
        def output_json(
            simulated_output: SourceType.String, tmpdir: SourceType.Directory
        ) -> SourceType.Stream:
            """
            Convert .dat files back into a json file
            """
            r = re.search(r"Simulated\s+((-)?\d+) cycles", simulated_output)
            cycle_count = int(r.group(1)) if r is not None else 0
            if cycle_count < 0:
                # `log.warn` is a deprecated alias (removed from the
                # logging module in Python 3.13); use `warning`.
                log.warning("Cycle count is less than 0")
            data = {
                "cycles": cycle_count,
                "memories": convert2json(tmpdir.name, "out"),
            }

            # Write to a file so we can return a stream.
            out = Path(tmpdir.name) / "output.json"
            with out.open("w") as f:
                sjson.dump(data, f, indent=2, sort_keys=True, use_decimal=True)
            return out.open("rb")

        @builder.step()
        def cleanup(tmpdir: SourceType.Directory):
            """
            Cleanup build files
            """
            tmpdir.remove()

        # Schedule
        tmpdir = mktmp()
        # if we need to, convert dynamically sourced json to dat
        if data_path is None:
            check_verilog_for_mem_read(input_data)
        else:
            json_to_dat(tmpdir, Source(Path(data_path), SourceType.Path))
        compile_with_iverilog(input_data, tmpdir)
        stdout = simulate(tmpdir)
        result = None
        if self.is_vcd:
            result = output_vcd(tmpdir)
        else:
            result = output_json(stdout, tmpdir)
        cleanup(tmpdir)
        return result
Example #4
0
 def f(_inp, ctx):
     """Dump the converted 'out' memories from the temp dir as a JSON File source."""
     serialized = json.dumps(
         convert2json(ctx['tmpdir'], 'out'), indent=2, sort_keys=True)
     stream = BytesIO(serialized.encode('UTF-8'))
     return (Source(stream, SourceType.File), None, 0)
Example #5
0
 def f(_inp, ctx):
     """Open the generated output.vcd from the temp dir as a binary File source."""
     vcd_path = Path(ctx['tmpdir']) / 'output.vcd'
     return (Source(vcd_path.open('rb'), SourceType.File), None, 0)
Example #6
0
    def _define_steps(self, input_data):
        """
        Define and schedule the Verilator build pipeline: make a temp dir,
        optionally convert JSON input data to `.dat` files, compile with
        Verilator, simulate, then extract either the VCD trace or a JSON
        result before cleaning up the temp dir.
        """
        # Step 1: Make a new temporary directory
        @self.step()
        def mktmp() -> SourceType.Directory:
            """
            Make temporary directory to store Verilator build files.
            """
            return TmpDir()

        # Step 2a: check if we need verilog.data to be passed
        @self.step()
        def check_verilog_for_mem_read(verilog_src: SourceType.String):
            """
            Read input verilog to see if `verilog.data` needs to be set.
            """
            if "readmemh" in verilog_src:
                raise errors.MissingDynamicConfiguration("verilog.data")

        # Step 2: Transform data from JSON to Dat.
        @self.step()
        def json_to_dat(tmp_dir: SourceType.Directory, json_path: SourceType.Stream):
            """
            Converts a `json` data format into a series of `.dat` files.
            """
            convert2dat(tmp_dir.name, sjson.load(json_path, use_decimal=True), "dat")

        # Step 3: compile with verilator; {input_path} and {tmpdir_name}
        # are filled in per-invocation below.
        cmd = " ".join(
            [
                self.cmd,
                "-cc",
                "--trace",
                "{input_path}",
                "--exe " + " --exe ".join(self.testbench_files),
                "--build",
                "--top-module",
                self.config["stages", self.name, "top_module"],
                "--Mdir",
                "{tmpdir_name}",
            ]
        )

        @self.step(description=cmd)
        def compile_with_verilator(
            input_path: SourceType.Path, tmpdir: SourceType.Directory
        ) -> SourceType.Stream:
            """
            Compile the input verilog program with Verilator.
            """
            return shell(
                cmd.format(input_path=str(input_path), tmpdir_name=tmpdir.name),
                stdout_as_debug=True,
            )

        # Step 4: simulate
        @self.step()
        def simulate(tmpdir: SourceType.Directory) -> SourceType.Stream:
            """
            Simulates compiled Verilator code.
            """
            return shell(
                [
                    f"DATA={tmpdir.name}",
                    f"{tmpdir.name}/Vmain",
                    f"{tmpdir.name}/output.vcd",
                    str(self.config["stages", self.name, "cycle_limit"]),
                    # Don't trace if we're only looking at memory outputs
                    "--trace" if self.vcd else "",
                ]
            )

        # Step 5(self.vcd == True): extract
        @self.step()
        def output_vcd(tmpdir: SourceType.Directory) -> SourceType.Stream:
            """
            Return the generated `output.vcd`.
            """
            # return stream instead of path because tmpdir gets deleted
            # before the next stage runs
            return (Path(tmpdir.name) / "output.vcd").open("rb")

        # Step 5(self.vcd == False): extract cycles + data
        @self.step()
        def output_json(
            simulated_output: SourceType.String, tmpdir: SourceType.Directory
        ) -> SourceType.String:
            """
            Convert .dat files back into a json and extract simulated cycles from log.
            """
            # Look for output like: "Simulated 91 cycles"
            r = re.search(r"Simulated (\d+) cycles", simulated_output)
            data = {
                # Default to 0 cycles when the log line is absent.
                "cycles": int(r.group(1)) if r is not None else 0,
                "memories": convert2json(tmpdir.name, "out"),
            }
            return sjson.dumps(data, indent=2, sort_keys=True, use_decimal=True)

        @self.step()
        def cleanup(tmpdir: SourceType.Directory):
            """
            Cleanup Verilator build files that we no longer need.
            """
            tmpdir.remove()

        # Schedule
        tmpdir = mktmp()
        # if we need to, convert dynamically sourced json to dat
        if self.data_path is None:
            check_verilog_for_mem_read(input_data)
        else:
            json_to_dat(tmpdir, Source(Path(self.data_path), SourceType.Path))
        compile_with_verilator(input_data, tmpdir)
        stdout = simulate(tmpdir)
        result = None
        if self.vcd:
            result = output_vcd(tmpdir)
        else:
            result = output_json(stdout, tmpdir)
        cleanup(tmpdir)
        return result
Example #7
0
    def _define_steps(self, input_data, builder, config):
        """
        Define and schedule the Xilinx bitstream pipeline: compile the
        input through external futil stages, stage the generated files in
        a (local or remote) sandbox, package them into an `.xo` with
        Vivado, and compile the final `.xclbin` with v++.
        """
        # As a debugging aid, the pass can optionally preserve the
        # (local or remote) sandbox where the Xilinx commands ran.
        save_temps = bool(config["stages", self.name, "save_temps"])

        mode = config["stages", self.name, "mode"]
        device = config["stages", self.name, "device"]

        # remote execution context
        remote_exec = RemoteExecution(builder, self, config)

        # tcl files
        self.gen_xo_tcl = (
            Path(config["global", "futil_directory"])
            / "fud"
            / "bitstream"
            / "gen_xo.tcl"
        )

        # {tmpdir} and {port_names} are filled in per-invocation below.
        package_cmd = (
            "cd {tmpdir} && "
            "mkdir -p xclbin && "
            "/scratch/opt/Xilinx/Vivado/2020.2/bin/vivado "
            "-mode batch "
            "-source gen_xo.tcl "
            "-tclargs xclbin/kernel.xo {port_names}"
        )

        @builder.step(package_cmd)
        def package_xo(client: SourceType.UnTyped, tmpdir: SourceType.String):
            """
            Package the staged files into an `.xo` using Vivado.
            """
            # Get the AXI port names.
            port_names = list(get_ports(Path(tmpdir) / 'kernel.xml'))

            # Run the .xo packager Vivado script.
            self._shell(client, package_cmd.format(
                tmpdir=tmpdir,
                port_names=' '.join(port_names),
            ), remote_exec)

        xclbin_cmd = (
            "cd {tmpdir} && "
            "/scratch/opt/Xilinx/Vitis/2020.2/bin/v++ -g "
            "-t {mode} "
            "--platform {device} "
            "--save-temps "
            "--profile.data all:all:all "
            "--profile.exec all:all:all "
            "-lo xclbin/kernel.xclbin "
            "xclbin/kernel.xo"
        )

        @builder.step(xclbin_cmd)
        def compile_xclbin(client: SourceType.UnTyped, tmpdir: SourceType.String):
            """
            Compile XO into xclbin.
            """
            self._shell(
                client,
                xclbin_cmd.format(tmpdir=tmpdir, mode=mode, device=device),
                remote_exec,
            )

        # Schedule
        # External stages called by this stage
        xilinx_stage = FutilStage("xilinx-verilog", "-b xilinx", "")
        xml_futil = FutilStage("xilinx-verilog", "-b xilinx-xml", "")
        kernel_futil = FutilStage(
            "xilinx-verilog", "-b verilog --synthesis -p external", ""
        )

        if remote_exec.use_ssh:
            remote_exec.import_libs()

        # Compile files using external stages
        xilinx = xilinx_stage._define_steps(input_data, builder, config)
        xml = xml_futil._define_steps(input_data, builder, config)
        kernel = kernel_futil._define_steps(input_data, builder, config)

        # Map each generated source to its filename inside the sandbox.
        file_map = {
            xilinx: "toplevel.v",
            kernel: "main.sv",
            xml: "kernel.xml",
            self.gen_xo_tcl: "gen_xo.tcl",
        }
        if remote_exec.use_ssh:
            client, tmpdir = remote_exec.open_and_send(file_map)
        else:
            # Local run: no SSH client needed, so pass an empty source.
            sandbox = LocalSandbox(builder, save_temps)
            tmpdir = sandbox.create(file_map)
            client = Source(None, SourceType.UnTyped)

        package_xo(client, tmpdir)
        compile_xclbin(client, tmpdir)

        if remote_exec.use_ssh:
            return remote_exec.close_and_get(
                client,
                tmpdir,
                "xclbin/kernel.xclbin",
                keep_tmpdir=save_temps,
            )
        else:
            return sandbox.get_file("xclbin/kernel.xclbin")
Example #8
0
File: stage.py Project: yn224/calyx
    def _define_steps(self, input_data, builder, config):
        """
        Define and schedule the Verilator pipeline: make a temp dir,
        optionally convert JSON input data to `.dat` files, compile with
        Verilator, simulate, then extract either the VCD trace or a JSON
        result before cleaning up the temp dir.
        """

        testbench_files = [
            str(
                Path(config["global", "futil_directory"])
                / "fud"
                / "sim"
                / "testbench.cpp"
            ),
        ]
        data_path = config.get(["stages", self.name, "data"])

        # Step 1: Make a new temporary directory
        @builder.step()
        def mktmp() -> SourceType.Directory:
            """
            Make temporary directory to store Verilator build files.
            """
            return TmpDir()

        # Step 2a: check if we need verilog.data to be passed
        @builder.step()
        def check_verilog_for_mem_read(verilog_src: SourceType.String):
            """
            Read input verilog to see if `verilog.data` needs to be set.
            """
            if "readmemh" in verilog_src:
                raise errors.MissingDynamicConfiguration("verilog.data")

        # Step 2: Transform data from JSON to Dat.
        @builder.step()
        def json_to_dat(tmp_dir: SourceType.Directory, json_path: SourceType.Stream):
            """
            Converts a `json` data format into a series of `.dat` files inside the given
            temporary directory.
            """
            round_float_to_fixed = config["stages", self.name, "round_float_to_fixed"]
            convert2dat(
                tmp_dir.name,
                sjson.load(json_path, use_decimal=True),
                "dat",
                round_float_to_fixed,
            )

        # Step 3: compile with verilator; {input_path} and {tmpdir_name}
        # are filled in per-invocation below.
        cmd = " ".join(
            [
                config["stages", self.name, "exec"],
                "-cc",
                "--trace",
                "{input_path}",
                "--exe " + " --exe ".join(testbench_files),
                "--build",
                "--top-module",
                config["stages", self.name, "top_module"],
                "--Mdir",
                "{tmpdir_name}",
            ]
        )

        @builder.step(description=cmd)
        def compile_with_verilator(
            input_path: SourceType.Path, tmpdir: SourceType.Directory
        ) -> SourceType.Stream:
            """
            Compile the input verilog program with Verilator.
            """
            return shell(
                cmd.format(input_path=str(input_path), tmpdir_name=tmpdir.name),
                stdout_as_debug=True,
            )

        # Step 4: simulate
        @builder.step()
        def simulate(tmpdir: SourceType.Directory) -> SourceType.Stream:
            """
            Simulates compiled Verilator code.
            """
            return shell(
                [
                    f"{tmpdir.name}/Vmain",
                    # VCD target defaults to the temp dir when unset.
                    unwrap_or(
                        config["stages", self.name, "vcd-target"],
                        f"{tmpdir.name}/output.vcd",
                    ),
                    str(config["stages", self.name, "cycle_limit"]),
                    # Don't trace if we're only looking at memory outputs
                    "--trace" if self.vcd else "",
                    f"+DATA={tmpdir.name}",
                ]
            )

        # Step 5(self.vcd == True): extract
        @builder.step()
        def output_vcd(tmpdir: SourceType.Directory) -> SourceType.Stream:
            """
            Return the generated `output.vcd`.
            """
            # return stream instead of path because tmpdir gets deleted before
            # the next stage runs

            if config["stages", self.name, "vcd-target"] is not None:
                target = Path(config["stages", self.name, "vcd-target"])
            else:
                target = Path(tmpdir.name) / "output.vcd"

            return target.open("rb")

        # Step 5(self.vcd == False): extract cycles + data
        @builder.step()
        def output_json(
            simulated_output: SourceType.String, tmpdir: SourceType.Directory
        ) -> SourceType.Stream:
            """
            Convert .dat files back into a json and extract simulated cycles from log.
            """
            # Verify we haven't hit the cycle limit.
            found = re.search(r"reached limit of (\d+) cycles", simulated_output)
            if found is not None:
                raise errors.CycleLimitedReached(self.name, found.group(1))

            # Look for output like: "Simulated 91 cycles"
            r = re.search(r"Simulated (\d+) cycles", simulated_output)
            data = {
                # Default to 0 cycles when the log line is absent.
                "cycles": int(r.group(1)) if r is not None else 0,
                "memories": convert2json(tmpdir.name, "out"),
            }

            # Write to a file so we can return a stream.
            out = Path(tmpdir.name) / "output.json"
            with out.open("w") as f:
                sjson.dump(data, f, indent=2, sort_keys=True, use_decimal=True)
            return out.open("rb")

        @builder.step()
        def cleanup(tmpdir: SourceType.Directory):
            """
            Cleanup Verilator build files that we no longer need.
            """
            tmpdir.remove()

        # Schedule
        tmpdir = mktmp()
        # if we need to, convert dynamically sourced json to dat
        if data_path is None:
            check_verilog_for_mem_read(input_data)
        else:
            json_to_dat(tmpdir, Source.path(data_path))
        compile_with_verilator(input_data, tmpdir)
        stdout = simulate(tmpdir)
        result = output_vcd(tmpdir) if self.vcd else output_json(stdout, tmpdir)
        cleanup(tmpdir)
        return result
Example #9
0
    def _define_steps(self, input_data, builder, config):
        """
        Define and schedule the Xilinx emulation pipeline: stage the
        xclbin, host code, and support files in a (local or remote)
        sandbox, compile the host binary, generate emconfig.json, run the
        emulation, and return the resulting `.wdb` waveform database.
        """

        xilinx_location = config["stages", self.name, "xilinx_location"]
        xrt_location = config["stages", self.name, "xrt_location"]
        setup_commands = (f"source {xilinx_location}/settings64.sh && "
                          f"source {xrt_location}/setup.sh")

        host_cpp = config["stages", self.name, "host"]
        save_temps = bool(config["stages", self.name, "save_temps"])
        xrt = (Path(config["global", "futil_directory"]) / "fud" /
               "bitstream" / "xrt.ini")
        sim_script = (Path(config["global", "futil_directory"]) / "fud" /
                      "bitstream" / "sim_script.tcl")
        mode = config["stages", self.name, "mode"]

        # remote execution
        remote_exec = RemoteExecution(builder, self, config)

        @builder.step()
        def check_host_cpp():
            """
            Make sure that `-s wdb.host` is provided
            """
            if host_cpp is None:
                raise errors.MissingDynamicConfiguration("wdb.host")

        @builder.step()
        def compile_host(client: SourceType.UnTyped,
                         tmpdir: SourceType.String):
            """
            Compile the host code
            """
            cmd = (f"cd {tmpdir} && "
                   "g++ "
                   f"-I{xrt_location}/include "
                   f"-I{xilinx_location}/include "
                   "-Wall -O0 -g -std=c++14 -fmessage-length=0 "
                   "host.cpp "
                   "-o 'host' "
                   f"-L{xrt_location}/lib -lOpenCL -lpthread -lrt -lstdc++")
            self._shell(client, cmd)

        @builder.step()
        def generate_emconfig(client: SourceType.UnTyped,
                              tmpdir: SourceType.String):
            """
            Generate emconfig.json
            """
            cmd = (f"cd {tmpdir} && "
                   f"{xilinx_location}/bin/emconfigutil "
                   f"--platform {self.device} "
                   "--od .")
            self._shell(client, cmd)

        @builder.step()
        def emulate(client: SourceType.UnTyped, tmpdir: SourceType.String):
            """
            Emulate the xclbin
            """
            cmd = (f"cd {tmpdir} && {setup_commands} && "
                   f"XCL_EMULATION_MODE={mode} "
                   f"./host kernel.xclbin {self.device}")
            self._shell(client, cmd)

        # Schedule
        check_host_cpp()

        # Map each input file to its filename inside the sandbox.
        file_map = {
            input_data: "kernel.xclbin",
            host_cpp: "host.cpp",
            xrt: "xrt.ini",
            sim_script: "sim_script.tcl",
        }
        if remote_exec.use_ssh:
            remote_exec.import_libs()
            client, tmpdir = remote_exec.open_and_send(file_map)
        else:
            # Local run: no SSH client needed, so pass an empty source.
            sandbox = LocalSandbox(builder, save_temps)
            tmpdir = sandbox.create(file_map)
            client = Source(None, SourceType.UnTyped)

        compile_host(client, tmpdir)
        generate_emconfig(client, tmpdir)
        emulate(client, tmpdir)

        wdb_name = f"{self.device}-0-kernel.wdb"
        if remote_exec.use_ssh:
            return remote_exec.close_and_get(
                client,
                tmpdir,
                wdb_name,
                keep_tmpdir=save_temps,
            )
        else:
            return sandbox.get_file(wdb_name)
Example #10
0
    def _define_steps(self, input_data, builder, config):
        """
        Define and schedule the interpreter/debugger pipeline: make a
        temp dir, optionally convert JSON input data for the interpreter
        memories, then either launch the interactive debugger or run the
        interpreter and (when `--raw` is set) parse its output to JSON.
        """

        script = config["stages", self.name, "exec"]
        data_path = config["stages", "verilog", "data"]

        # {data_file} and {target} are filled in per-invocation below;
        # the data flags are only added when a data file was supplied.
        cmd = [
            script,
            self.flags,
            unwrap_or(config["stages", self.name, "flags"], ""),
            "-l",
            config["global", "futil_directory"],
            "--data" if data_path else "",
            "{data_file}" if data_path else "",
            "{target}",
        ]

        if self._is_debugger():
            cmd += [
                "debug",
                self.debugger_flags,
                unwrap_or(config["stages", "debugger", "flags"], ""),
            ]

        cmd = " ".join(cmd)

        @builder.step()
        def mktmp() -> SourceType.Directory:
            """
            Make temporary directory to store Verilator build files.
            """
            return TmpDir()

        @builder.step()
        def convert_json_to_interp_json(
            tmpdir: SourceType.Directory, json_path: SourceType.Stream
        ):
            """
            Creates a data file to initialize the interpreter memories
            """
            round_float_to_fixed = config["stages", self.name, "round_float_to_fixed"]
            convert_to_json(
                tmpdir.name,
                sjson.load(json_path, use_decimal=True),
                round_float_to_fixed,
            )

        @builder.step(description=cmd)
        def interpret(
            target: SourceType.Path, tmpdir: SourceType.Directory
        ) -> SourceType.Stream:
            """
            Invoke the interpreter
            """

            command = cmd.format(
                data_file=Path(tmpdir.name) / _FILE_NAME, target=str(target)
            )

            return shell(command)

        @builder.step(description=cmd)
        def debug(
            target: SourceType.Path, tmpdir: SourceType.Directory
        ) -> SourceType.Terminal:
            """
            Invoke the debugger
            """
            command = cmd.format(
                data_file=Path(tmpdir.name) / _FILE_NAME, target=str(target)
            )
            transparent_shell(command)

        @builder.step()
        def parse_output(
            output: SourceType.Stream,
            json_path: SourceType.UnTyped,
            tmpdir: SourceType.Directory,
        ) -> SourceType.Stream:
            """
            Parses a raw interpreter output
            """

            out_path = Path(tmpdir.name) / "output.json"
            output = parse_from_json(output, json_path)

            with out_path.open("w") as f:
                sjson.dump(output, f, indent=2, sort_keys=True, use_decimal=True)

            return out_path.open("rb")

        # schedule
        tmpdir = mktmp()

        if data_path is not None:
            convert_json_to_interp_json(
                tmpdir, Source(Path(data_path), SourceType.Path)
            )

        if self._is_debugger():
            # NOTE(review): the debugger branch returns None (the session
            # is interactive) — confirm callers tolerate a missing result.
            debug(input_data, tmpdir)
        else:
            result = interpret(input_data, tmpdir)

            if "--raw" in cmd:
                return parse_output(
                    result, Source(data_path, SourceType.UnTyped), tmpdir
                )
            else:
                return result
Example #11
0
 def compile_kernel(inp: SourceType.Stream) -> SourceType.Path:
     """
     Run the kernel futil stage on the input stream and return the
     path of the compiled result.
     """
     return (self.kernel_futil.run(Source(
         inp, SourceType.Stream)).convert_to(SourceType.Path).data)
Example #12
0
 def f(_inp, ctx):
     """Open the generated output.vcd from the temp dir as a binary File source."""
     stream = (Path(ctx["tmpdir"]) / "output.vcd").open("rb")
     return (Source(stream, SourceType.File), None, 0)
Example #13
0
 def f(_, ctx):
     """Wrap the context's temp-dir object in a TmpDir source."""
     tmpdir_source = Source(ctx["tmpdir_obj"], SourceType.TmpDir)
     return (tmpdir_source, None, 0)