def configure(self, edam):
    super().configure(edam)

    incdirs = []
    sv_files = []
    unused_files = []

    for f in self.files:
        # Use a default so files without a file_type don't raise on startswith()
        if f.get("file_type", "").startswith("systemVerilogSource"):
            if not self._add_include_dir(f, incdirs):
                sv_files.append(f["name"])
        else:
            unused_files.append(f)

    output_file = self.name + ".v"
    self.edam = edam.copy()
    self.edam["files"] = unused_files
    self.edam["files"].append(
        {
            "name": output_file,
            "file_type": "verilogSource",
        }
    )

    sv2v_options = self.tool_options.get("sv2v_options", [])

    commands = EdaCommands()
    commands.add(
        ["sv2v", "-w", output_file]
        + sv2v_options
        + ["-I" + d for d in incdirs]
        + sv_files,
        [output_file],
        sv_files,
    )
    self.commands = commands.commands
def configure(self, edam):
    super().configure(edam)

    unused_files = []
    asc_file = ""
    bin_file = ""
    for f in self.files:
        if f["file_type"] == "iceboxAscii":
            if asc_file:
                raise RuntimeError(
                    "Icepack only supports one input file. Found {} and {}".format(
                        asc_file, f["name"]
                    )
                )
            asc_file = f["name"]
        else:
            unused_files.append(f)

    if not asc_file:
        raise RuntimeError("No input file specified for icepack")

    bin_file = os.path.splitext(asc_file)[0] + ".bin"

    self.edam = edam.copy()
    self.edam["files"] = unused_files
    self.edam["files"].append({"name": bin_file, "file_type": "binary"})

    # Image generation
    depends = asc_file
    targets = bin_file
    command = ["icepack", depends, targets]

    commands = EdaCommands()
    commands.add(command, [targets], [depends])
    self.commands = commands.commands
def configure(self, edam):
    super().configure(edam)

    unused_files = []
    asc_file = ""
    for f in self.files:
        if f["file_type"] == "iceboxAscii":
            if asc_file:
                raise RuntimeError(
                    "Icetime only supports one input file. Found {} and {}".format(
                        asc_file, f["name"]
                    )
                )
            asc_file = f["name"]
        else:
            unused_files.append(f)

    if not asc_file:
        raise RuntimeError("No input file specified for icetime")

    tim_file = os.path.splitext(asc_file)[0] + ".tim"

    self.edam["files"] = unused_files
    self.edam["files"].append({"name": tim_file, "file_type": "report"})

    # Timing report generation
    depends = asc_file
    targets = tim_file
    command = ["icetime", "-r", targets] + self.tool_options.get("icetime_options", [])
    command.append(depends)

    commands = EdaCommands()
    commands.add(command, [targets], [depends])
    commands.add([], ["timing"], [targets])
    self.commands = commands.commands
def configure(self, edam):
    super().configure(edam)

    incdirs = []
    file_table = []
    unused_files = []
    depfiles = []

    # Filter out input files
    for f in self.files:
        src = ""
        if "file_type" in f:
            file_type = f.get("file_type", "")
            if file_type.startswith("verilogSource"):
                src = f["name"]
            elif file_type.startswith("systemVerilogSource"):
                src = "-sv " + f["name"]

        if src:
            depfiles.append(f["name"])
            if not self._add_include_dir(f, incdirs):
                file_table.append(src)
        else:
            unused_files.append(f)

    # Create output EDAM
    output_file = "slpp_all/surelog.uhdm"
    self.edam = edam.copy()
    self.edam["files"] = unused_files
    self.edam["files"].append({"name": output_file, "file_type": "uhdm"})

    # Handle verilog defines
    verilog_defines = []
    for key, value in self.vlogdefine.items():
        verilog_defines.append(f"+define+{key}={value}")

    # Handle verilog parameters
    verilog_params = []
    for key, value in self.vlogparam.items():
        verilog_params.append(f"-P{key}={value}")

    commands = EdaCommands()
    commands.add(
        ["surelog", "-top", self.toplevel]
        + self.tool_options.get("surelog_options", [])
        + ["-parse"]
        + verilog_defines
        + verilog_params
        + ["-I" + d for d in incdirs]
        + file_table,
        [output_file],
        depfiles,
    )
    self.commands = commands.commands
def configure_main(self):
    # Pass apicula tool options to yosys and nextpnr
    self.edam["tool_options"] = {
        "yosys": {
            "arch": "gowin",
            "output_format": "json",
            "yosys_synth_options": [f"-json {self.name}.json"]
            + self.tool_options.get("yosys_synth_options", []),
            "yosys_as_subtool": True,
            "yosys_template": self.tool_options.get("yosys_template"),
        },
        "nextpnr": {
            "device": self.tool_options.get("device"),
            "nextpnr_options": self.tool_options.get("nextpnr_options", []),
        },
    }

    yosys = Yosys(self.edam, self.work_root)
    yosys.configure()

    nextpnr = Nextpnr(yosys.edam, self.work_root)
    nextpnr.flow_config = {"arch": "gowin"}
    nextpnr.configure()

    # Write Makefile
    commands = EdaCommands()
    commands.commands = yosys.commands
    commands.commands += nextpnr.commands

    # Image generation
    depends = self.name + ".pack"
    targets = self.name + ".fs"
    command = [
        "gowin_pack",
        "-d",
        self.tool_options.get("device"),
        "-o",
        targets,
        depends,
    ]
    commands.add(command, [targets], [depends])

    commands.set_default_target(targets)
    commands.write(os.path.join(self.work_root, "Makefile"))
def configure_main(self):
    # Pass oxide tool options to yosys and nextpnr
    self.edam["tool_options"] = {
        "yosys": {
            "arch": "nexus",
            "output_format": "json",
            "yosys_synth_options": self.tool_options.get("yosys_synth_options", []),
            "yosys_as_subtool": True,
            "yosys_template": self.tool_options.get("yosys_template"),
        },
        "nextpnr": {
            "device": self.tool_options.get("device"),
            "nextpnr_options": self.tool_options.get("nextpnr_options", []),
        },
    }

    yosys = Yosys(self.edam, self.work_root)
    yosys.configure()

    nextpnr = Nextpnr(yosys.edam, self.work_root)
    nextpnr.flow_config = {"arch": "nexus"}
    nextpnr.configure()

    # Write Makefile
    commands = EdaCommands()
    commands.commands = yosys.commands
    commands.commands += nextpnr.commands

    # Image generation
    depends = self.name + ".fasm"
    targets = self.name + ".bit"
    command = ["prjoxide", "pack", depends, targets]
    commands.add(command, [targets], [depends])

    commands.set_default_target(self.name + ".bit")
    commands.write(os.path.join(self.work_root, "Makefile"))
def configure_vpr(self):
    (src_files, incdirs) = self._get_fileset_files(force_slash=True)

    has_vhdl = "vhdlSource" in [x.file_type for x in src_files]
    has_vhdl2008 = "vhdlSource-2008" in [x.file_type for x in src_files]

    if has_vhdl or has_vhdl2008:
        logger.error("VHDL files are not supported in Yosys")

    file_list = []
    timing_constraints = []
    pins_constraints = []
    placement_constraints = []

    for f in src_files:
        if f.file_type in ["verilogSource"]:
            file_list.append(f.name)
        if f.file_type in ["SDC"]:
            timing_constraints.append(f.name)
        if f.file_type in ["PCF"]:
            pins_constraints.append(f.name)
        if f.file_type in ["xdc"]:
            placement_constraints.append(f.name)

    part = self.tool_options.get("part")
    package = self.tool_options.get("package")
    vendor = self.tool_options.get("vendor")

    if not part:
        logger.error('Missing required "part" parameter')
    if not package:
        logger.error('Missing required "package" parameter')

    if vendor == "xilinx":
        if "xc7a" in part:
            bitstream_device = "artix7"
        if "xc7z" in part:
            bitstream_device = "zynq7"
        if "xc7k" in part:
            bitstream_device = "kintex7"

        partname = part + package

        # a35t are in fact a50t
        # leave partname with 35 so we access correct DB
        if part == "xc7a35t":
            part = "xc7a50t"
        device_suffix = "test"
    elif vendor == "quicklogic":
        partname = package
        device_suffix = "wlcsp"
        bitstream_device = part + "_" + device_suffix

    _vo = self.tool_options.get("vpr_options")
    vpr_options = ["--additional_vpr_options", f'"{_vo}"'] if _vo else []
    pcf_opts = ["-p"] + pins_constraints if pins_constraints else []
    sdc_opts = ["-s"] + timing_constraints if timing_constraints else []
    xdc_opts = ["-x"] + placement_constraints if placement_constraints else []

    commands = EdaCommands()

    # Synthesis
    targets = self.toplevel + ".eblif"
    command = ["symbiflow_synth", "-t", self.toplevel]
    command += ["-v"] + file_list
    command += ["-d", bitstream_device]
    command += ["-p" if vendor == "xilinx" else "-P", partname]
    command += xdc_opts
    commands.add(command, [targets], [])

    # P&R
    eblif_opt = ["-e", self.toplevel + ".eblif"]
    device_opt = ["-d", part + "_" + device_suffix]

    depends = self.toplevel + ".eblif"
    targets = self.toplevel + ".net"
    command = ["symbiflow_pack"] + eblif_opt + device_opt + sdc_opts + vpr_options
    commands.add(command, [targets], [depends])

    depends = self.toplevel + ".net"
    targets = self.toplevel + ".place"
    command = ["symbiflow_place"] + eblif_opt + device_opt
    command += ["-n", depends, "-P", partname]
    command += sdc_opts + pcf_opts + vpr_options
    commands.add(command, [targets], [depends])

    depends = self.toplevel + ".place"
    targets = self.toplevel + ".route"
    command = ["symbiflow_route"] + eblif_opt + device_opt
    command += sdc_opts + vpr_options
    commands.add(command, [targets], [depends])

    depends = self.toplevel + ".route"
    targets = self.toplevel + ".fasm"
    command = ["symbiflow_write_fasm"] + eblif_opt + device_opt
    command += sdc_opts + vpr_options
    commands.add(command, [targets], [depends])

    depends = self.toplevel + ".fasm"
    targets = self.toplevel + ".bit"
    command = ["symbiflow_write_bitstream"] + ["-d", bitstream_device]
    command += ["-f", depends]
    command += ["-p" if vendor == "xilinx" else "-P", partname]
    command += ["-b", targets]
    commands.add(command, [targets], [depends])

    commands.set_default_target(targets)
    commands.write(os.path.join(self.work_root, "Makefile"))
def configure_nextpnr(self):
    (src_files, incdirs) = self._get_fileset_files(force_slash=True)

    vendor = self.tool_options.get("vendor")

    # Yosys configuration
    yosys_synth_options = self.tool_options.get("yosys_synth_options", "")
    yosys_template = self.tool_options.get("yosys_template")
    yosys_edam = {
        "files": self.files,
        "name": self.name,
        "toplevel": self.toplevel,
        "parameters": self.parameters,
        "tool_options": {
            "yosys": {
                "arch": vendor,
                "output_format": "json",
                "yosys_synth_options": yosys_synth_options,
                "yosys_template": yosys_template,
                "yosys_as_subtool": True,
            }
        },
    }

    yosys = getattr(import_module("edalize.yosys"), "Yosys")(yosys_edam, self.work_root)
    yosys.configure()

    # Nextpnr configuration
    arch = self.tool_options.get("arch")
    if arch not in self.archs:
        logger.error(
            'Missing or invalid "arch" parameter: {} in "tool_options"'.format(arch)
        )

    package = self.tool_options.get("package")
    if not package:
        logger.error('Missing required "package" parameter')

    part = self.tool_options.get("part")
    if not part:
        logger.error('Missing required "part" parameter')

    target_family = None
    for family in getattr(self, "fpga_interchange_families"):
        if family in part:
            target_family = family
            break

    if target_family is None and arch == "fpga_interchange":
        logger.error(
            "Couldn't find family for part: {}. Available families: {}".format(
                part, ", ".join(getattr(self, "fpga_interchange_families"))
            )
        )

    chipdb = None
    device = None
    placement_constraints = []

    for f in src_files:
        if f.file_type in ["bba"]:
            chipdb = f.name
        elif f.file_type in ["device"]:
            device = f.name
        elif f.file_type in ["xdc"]:
            placement_constraints.append(f.name)
        else:
            continue

    if not chipdb:
        logger.error("Missing required chipdb file")

    if placement_constraints == []:
        logger.error("Missing required XDC file(s)")

    if device is None and arch == "fpga_interchange":
        logger.error('Missing required ".device" file for "fpga_interchange" arch')

    nextpnr_options = self.tool_options.get("nextpnr_options", "")
    partname = part + package

    # Strip speedgrade string when using fpga_interchange
    package = package.split("-")[0] if arch == "fpga_interchange" else None

    if "xc7a" in part:
        bitstream_device = "artix7"
    if "xc7z" in part:
        bitstream_device = "zynq7"
    if "xc7k" in part:
        bitstream_device = "kintex7"

    depends = self.name + ".json"

    xdcs = []
    for x in placement_constraints:
        xdcs += ["--xdc", x]

    commands = EdaCommands()
    commands.commands = yosys.commands

    if arch == "fpga_interchange":
        commands.header += """ifndef INTERCHANGE_SCHEMA_PATH
$(error Environment variable INTERCHANGE_SCHEMA_PATH was not found. It should be set to <fpga-interchange-schema path>/interchange)
endif

"""
        targets = self.name + ".netlist"
        command = ["python", "-m", "fpga_interchange.yosys_json"]
        command += ["--schema_dir", "$(INTERCHANGE_SCHEMA_PATH)"]
        command += ["--device", device]
        command += ["--top", self.toplevel]
        command += [depends, targets]
        commands.add(command, [targets], [depends])

        depends = self.name + ".netlist"
        targets = self.name + ".phys"
        command = ["nextpnr-" + arch, "--chipdb", chipdb]
        command += ["--package", package]
        command += xdcs
        command += ["--netlist", depends]
        command += ["--write", self.name + ".routed.json"]
        command += ["--phys", targets]
        command += [nextpnr_options]
        commands.add(command, [targets], [depends])

        depends = self.name + ".phys"
        targets = self.name + ".fasm"
        command = ["python", "-m", "fpga_interchange.fasm_generator"]
        command += ["--schema_dir", "$(INTERCHANGE_SCHEMA_PATH)"]
        command += [
            "--family",
            family,
            device,
            self.name + ".netlist",
            depends,
            targets,
        ]
        commands.add(command, [targets], [depends])
    else:
        targets = self.name + ".fasm"
        command = ["nextpnr-" + arch, "--chipdb", chipdb]
        command += xdcs
        command += ["--json", depends]
        command += ["--write", self.name + ".routed.json"]
        command += ["--fasm", targets]
        command += ["--log", "nextpnr.log"]
        command += [nextpnr_options]
        commands.add(command, [targets], [depends])

    depends = self.name + ".fasm"
    targets = self.name + ".bit"
    command = ["symbiflow_write_bitstream", "-d", bitstream_device]
    command += ["-f", depends, "-p", partname, "-b", targets]
    commands.add(command, [targets], [depends])

    commands.set_default_target(targets)
    commands.write(os.path.join(self.work_root, "Makefile"))
def configure(self, edam):
    super().configure(edam)

    cst_file = ""
    lpf_file = ""
    pcf_file = ""
    netlist = ""
    unused_files = []

    for f in self.files:
        file_type = f.get("file_type", "")
        if file_type == "CST":
            if cst_file:
                raise RuntimeError(
                    "Nextpnr only supports one CST file. Found {} and {}".format(
                        cst_file, f["name"]
                    )
                )
            cst_file = f["name"]
        elif file_type == "LPF":
            if lpf_file:
                raise RuntimeError(
                    "Nextpnr only supports one LPF file. Found {} and {}".format(
                        lpf_file, f["name"]
                    )
                )
            lpf_file = f["name"]
        elif file_type == "PCF":
            if pcf_file:
                raise RuntimeError(
                    "Nextpnr only supports one PCF file. Found {} and {}".format(
                        pcf_file, f["name"]
                    )
                )
            pcf_file = f["name"]
        elif file_type == "jsonNetlist":
            if netlist:
                raise RuntimeError(
                    "Nextpnr only supports one netlist. Found {} and {}".format(
                        netlist, f["name"]
                    )
                )
            netlist = f["name"]
        else:
            unused_files.append(f)

    self.edam = edam.copy()
    self.edam["files"] = unused_files
    of = [
        {
            "name": self.name + ".asc",
            "file_type": "iceboxAscii",
        },
    ]
    self.edam["files"] += of

    # Write Makefile
    commands = EdaCommands()

    arch = self.tool_options["arch"]
    arch_options = []
    if arch == "ecp5":
        targets = self.name + ".config"
        constraints = ["--lpf", lpf_file] if lpf_file else []
        output = ["--textcfg", targets]
    elif arch == "gowin":
        device = self.tool_options.get("device")
        if not device:
            raise RuntimeError("Missing required option 'device' for nextpnr-gowin")
        arch_options += ["--device", device]
        targets = self.name + ".pack"
        constraints = ["--cst", cst_file] if cst_file else []
        output = ["--write", targets]
    else:
        targets = self.name + ".asc"
        constraints = ["--pcf", pcf_file] if pcf_file else []
        output = ["--asc", targets]

    depends = netlist
    command = ["nextpnr-" + arch, "-l", "next.log"]
    command += arch_options + self.tool_options.get("nextpnr_options", [])
    command += constraints + ["--json", depends] + output

    # CLI target
    commands.add(command, [targets], [depends])

    # GUI target
    commands.add(command + ["--gui"], ["build-gui"], [depends])

    self.commands = commands.commands
def configure_main(self):
    (src_files, incdirs) = self._get_fileset_files()

    synth_out = self.name + "_synth.v"

    device = self.tool_options.get("device")
    if not device:
        raise RuntimeError("Missing required option 'device' for p_r")

    match = re.search("^CCGM1A([1-9]{1,2})$", device)
    if not match:
        raise RuntimeError("{} is not a known device name".format(device))
    device_number = match.groups()[0]
    if device_number not in ["1", "2", "4", "9", "16", "25"]:
        raise RuntimeError("Rel. size {} is not supported".format(device_number))

    ccf_file = None
    for f in src_files:
        if f.file_type == "CCF":
            if ccf_file:
                raise RuntimeError(
                    "p_r only supports one ccf file. Found {} and {}".format(
                        ccf_file, f.name
                    )
                )
            else:
                ccf_file = f.name

    # Pass gatemate tool options to yosys
    self.edam["tool_options"] = {
        "yosys": {
            "arch": "gatemate",
            "output_format": "verilog",
            "output_name": synth_out,
            "yosys_synth_options": self.tool_options.get("yosys_synth_options", []),
            "yosys_as_subtool": True,
            "yosys_template": self.tool_options.get("yosys_template"),
        },
    }

    yosys = Yosys(self.edam, self.work_root)
    yosys.configure()

    # Write Makefile
    commands = EdaCommands()
    commands.commands = yosys.commands

    # PnR & image generation
    targets = self.name + "_00.cfg.bit"
    command = [
        "p_r",
        "-A",
        device_number,
        "-i",
        synth_out,
        "-o",
        self.name,
        "-lib",
        "ccag",
        " ".join(self.tool_options.get("p_r_options", "")),
    ]
    if ccf_file is not None:
        command += ["-ccf", ccf_file]
    commands.add(command, [targets], [synth_out])
    commands.set_default_target(targets)

    commands.write(os.path.join(self.work_root, "Makefile"))
def configure(self, edam):
    """
    Configuration is the first phase of the build.

    This writes the project TCL files and Makefile. It first collects all
    sources, IPs and constraints and then writes them to the TCL file along
    with the build steps.
    """
    super().configure(edam)

    src_files = []
    incdirs = []
    edif_files = []
    has_vhdl2008 = False
    has_xci = False
    unused_files = []
    bd_files = []

    for f in self.files:
        cmd = ""
        if f["file_type"].startswith("verilogSource"):
            cmd = "read_verilog"
        elif f["file_type"].startswith("systemVerilogSource"):
            cmd = "read_verilog -sv"
        elif f["file_type"] == "tclSource":
            cmd = "source"
        elif f["file_type"] == "edif":
            cmd = "read_edif"
            edif_files.append(f["name"])
        elif f["file_type"].startswith("vhdlSource"):
            cmd = "read_vhdl"
            if f["file_type"] == "vhdlSource-2008":
                has_vhdl2008 = True
                cmd += " -vhdl2008"
            if f.get("logical_name"):
                cmd += " -library " + f["logical_name"]
        elif f["file_type"] == "xci":
            cmd = "read_ip"
            has_xci = True
        elif f["file_type"] == "xdc":
            cmd = "read_xdc"
        elif f["file_type"] == "SDC":
            cmd = "read_xdc -unmanaged"
        elif f["file_type"] == "mem":
            cmd = "read_mem"
        elif f["file_type"] == "bd":
            cmd = "read_bd"
            bd_files.append(f["name"])

        if cmd:
            if not self._add_include_dir(f, incdirs):
                src_files.append(cmd + " {" + f["name"] + "}")
        else:
            unused_files.append(f)

    template_vars = {
        "name": self.name,
        "src_files": "\n".join(src_files),
        "incdirs": incdirs + ["."],
        "tool_options": self.tool_options,
        "toplevel": self.toplevel,
        "vlogparam": self.vlogparam,
        "vlogdefine": self.vlogdefine,
        "generic": self.generic,
        "netlist_flow": bool(edif_files),
        "has_vhdl2008": has_vhdl2008,
        "has_xci": has_xci,
        "bd_files": bd_files,
    }

    self.render_template("vivado-project.tcl.j2", self.name + ".tcl", template_vars)

    jobs = self.tool_options.get("jobs", None)

    run_template_vars = {"jobs": " -jobs " + str(jobs) if jobs is not None else ""}
    self.render_template("vivado-run.tcl.j2", self.name + "_run.tcl", run_template_vars)

    synth_template_vars = {"jobs": " -jobs " + str(jobs) if jobs is not None else ""}
    self.render_template(
        "vivado-synth.tcl.j2", self.name + "_synth.tcl", synth_template_vars
    )

    # Write Makefile
    commands = EdaCommands()

    vivado_command = ["vivado", "-notrace", "-mode", "batch", "-source"]

    # Create project file
    project_file = self.name + ".xpr"
    tcl_file = [self.name + ".tcl"]
    commands.add(vivado_command + tcl_file, [project_file], tcl_file + edif_files)

    synth = self.tool_options.get("synth", "vivado")
    if synth == "vivado":
        depends = [f"{self.name}_synth.tcl", project_file]
        targets = [f"{self.name}.runs/synth_1/__synthesis_is_complete__"]
        commands.add(vivado_command + depends, targets, depends)
    else:
        targets = edif_files

    commands.add([], ["synth"], targets)

    # Bitstream generation
    run_tcl = self.name + "_run.tcl"
    depends = [run_tcl, project_file]
    bitstream = self.name + ".bit"
    commands.add(vivado_command + depends, [bitstream], depends)

    commands.add(["vivado", project_file], ["build-gui"], [project_file])

    depends = [self.name + "_pgm.tcl", bitstream]
    command = [
        "vivado",
        "-quiet",
        "-nolog",
        "-notrace",
        "-mode",
        "batch",
        "-source",
        f"{self.name}_pgm.tcl",
        "-tclargs",
    ]
    part = self.tool_options.get("part", "")
    command += [part] if part else []
    command += [bitstream]
    commands.add(command, ["pgm"], depends)

    commands.set_default_target(bitstream)
    commands.write(os.path.join(self.work_root, "Makefile"))

    self.commands = commands.commands
    self.render_template("vivado-program.tcl.j2", self.name + "_pgm.tcl")
def configure_main(self):
    # Write Yosys tcl script file
    yosys_template = self.tool_options.get("yosys_template")

    incdirs = []
    file_table = []
    unused_files = []

    for f in self.files:
        cmd = ""
        if f["file_type"].startswith("verilogSource"):
            cmd = "read_verilog"
        elif f["file_type"].startswith("systemVerilogSource"):
            cmd = "read_verilog -sv"
        elif f["file_type"] == "tclSource":
            cmd = "source"

        if cmd:
            if not self._add_include_dir(f, incdirs):
                file_table.append(cmd + " {" + f["name"] + "}")
        else:
            unused_files.append(f)

    self.edam["files"] = unused_files

    output_format = self.tool_options.get("output_format", "blif")
    default_target = f"{self.name}.{output_format}"
    self.edam["files"].append(
        {
            "name": default_target,
            "file_type": "jsonNetlist" if output_format == "json" else output_format,
        }
    )

    verilog_defines = []
    for key, value in self.vlogdefine.items():
        verilog_defines.append("{{{key} {value}}}".format(key=key, value=value))

    verilog_params = []
    for key, value in self.vlogparam.items():
        if type(value) is str:
            value = '{"' + value + '"}'
        _s = r"chparam -set {} {} {}"
        verilog_params.append(
            _s.format(key, self._param_value_str(value), self.toplevel)
        )

    arch = self.tool_options.get("arch", None)
    if not arch:
        logger.error("ERROR: arch is not defined.")

    template = yosys_template or "edalize_yosys_template.tcl"

    template_vars = {
        "verilog_defines": "{" + " ".join(verilog_defines) + "}",
        "verilog_params": "\n".join(verilog_params),
        "file_table": "\n".join(file_table),
        "incdirs": " ".join(["-I" + d for d in incdirs]),
        "top": self.toplevel,
        "synth_command": "synth_" + arch,
        "synth_options": " ".join(self.tool_options.get("yosys_synth_options", "")),
        "write_command": "write_" + output_format,
        "output_format": output_format,
        "output_opts": "-pvector bra " if arch == "xilinx" else "",
        "yosys_template": template,
        "name": self.name,
    }

    self.render_template(
        "edalize_yosys_procs.tcl.j2", "edalize_yosys_procs.tcl", template_vars
    )

    if not yosys_template:
        self.render_template(
            "yosys-script-tcl.j2", "edalize_yosys_template.tcl", template_vars
        )

    commands = EdaCommands()
    commands.add(
        ["yosys", "-l", "yosys.log", "-p", f"'tcl {template}'"],
        [default_target],
        [template],
    )
    if self.tool_options.get("yosys_as_subtool"):
        self.commands = commands.commands
    else:
        commands.set_default_target(f"{self.name}.{output_format}")
        commands.write(os.path.join(self.work_root, "Makefile"))
def configure(self, edam):
    super().configure(edam)

    analyze_options = self.tool_options.get("analyze_options", [])

    # Check for a --std=xx analyze option; this overrides the dynamic
    # determination of the VHDL standard
    import re

    rx = re.compile("^--std=([0-9]+)")
    m = None
    for o in analyze_options:
        m = rx.match(o)
        if m:
            stdarg = [m.group()]
            analyze_options.remove(o)
            break

    if m:
        logger.warning(
            "Analyze option "
            + m.group()
            + " given, will override any vhdlSource-xxxx specification\n"
        )
        standard = m.group(1)
    else:
        # ghdl does not support mixing incompatible versions
        # specifying 93c as std should allow 87 syntax
        # 2008 can't be combined so try to parse everything with 08 std
        has87 = has93 = has08 = False
        for f in self.files:
            if f["file_type"] == "vhdlSource-87":
                has87 = True
            elif f["file_type"] == "vhdlSource-93":
                has93 = True
            elif f["file_type"] == "vhdlSource-2008":
                has08 = True

        stdarg = []
        if has08:
            if has87 or has93:
                logger.warning(
                    "ghdl can't mix vhdlSource-2008 with other standard versions\n"
                    + "Trying with treating all as vhdlSource-2008"
                )
            stdarg = ["--std=08"]
        elif has87 and has93:
            stdarg = ["--std=93c"]
        elif has87:
            stdarg = ["--std=87"]
        elif has93:
            stdarg = ["--std=93"]
        else:
            stdarg = ["--std=93c"]

        standard = rx.match(stdarg[0]).group(1)

    run_options = self.tool_options.get("run_options", [])
    analyze_options = " ".join(analyze_options)

    _vhdltypes = ("vhdlSource", "vhdlSource-87", "vhdlSource-93", "vhdlSource-2008")

    libraries = {}
    library_options = "--work={lib} --workdir=./{lib}"
    ghdlimport = ""
    vhdl_sources = ""

    # GHDL versions older than 849a25e0 don't support the dot notation (e.g.
    # my_lib.top_design) for the top level.
    # Nonetheless, we unconditionally split the library and the primary unit,
    # if the user specified the top level using the dot notation.
    top = self.toplevel.split(".")
    if len(top) > 2:
        logger.error("Invalid dot notation in toplevel: {}".format(self.toplevel))
    top_libraries = ""
    if len(top) > 1:
        libraries[top[0]] = []
        top_libraries = library_options.format(lib=top[0])
    top_unit = top[-1]

    unused_files = []
    depfiles = []
    for f in self.files:
        if f["file_type"] in _vhdltypes:
            # Files without a specified library will be added to
            # libraries[None] which is perhaps poor form but avoids
            # conflicts with user generated names
            libraries[f["logical_name"]] = libraries.get(f["logical_name"], []) + [
                f["name"]
            ]
            vhdl_sources += " {file}".format(file=f["name"])
            depfiles.append(f["name"])
        else:
            unused_files.append(f)

    self.edam = edam.copy()
    self.edam["files"] = unused_files

    ghdlimport = ""
    make_libraries_directories = ""
    for lib, files in libraries.items():
        lib_opts = ""
        if lib:
            analyze_options += " -P./{}".format(lib)
            make_libraries_directories += "\tmkdir -p {}\n".format(lib)
            lib_opts = library_options.format(lib=lib)
        ghdlimport += "\tghdl -i $(STD) $(ANALYZE_OPTIONS) {} {}\n".format(
            lib_opts, " ".join(files)
        )

    commands = EdaCommands()
    if self.tool_options.get("mode") == "verilog":
        commands.add(
            ["ghdl", "-a"]
            + stdarg
            # analyze_options was joined into a single string above, so split
            # it back into individual command-line arguments here
            + analyze_options.split()
            + [top_libraries, top_unit],
            # FIXME: Get names of object files here
            [f"work-obj{stdarg[0]}.cf"],
            depfiles,
        )
def configure(self, edam):
    super().configure(edam)

    # Future improvement: Separate include directories of c and verilog files
    incdirs = set()

    verilator_file = self.name + ".vc"

    vc = []

    vc.append("--Mdir .")

    modes = ["sc", "cc", "lint-only"]

    # Default to cc mode if not specified
    mode = self.tool_options.get("mode", "cc")
    if not mode in modes:
        _s = "Illegal verilator mode {}. Allowed values are {}"
        raise RuntimeError(_s.format(mode, ", ".join(modes)))
    vc.append("--" + mode)

    vc += self.tool_options.get("verilator_options", [])

    vlt_files = []
    vlog_files = []
    opt_c_files = []
    uhdm_files = []
    unused_files = []
    depfiles = []

    for f in self.files:
        file_type = f.get("file_type", "")
        depfile = True
        if file_type.startswith("systemVerilogSource") or file_type.startswith(
            "verilogSource"
        ):
            if not self._add_include_dir(f, incdirs):
                vlog_files.append(f["name"])
        elif file_type in ["cppSource", "systemCSource", "cSource"]:
            if not self._add_include_dir(f, incdirs):
                opt_c_files.append(f["name"])
        elif file_type == "vlt":
            vlt_files.append(f["name"])
        elif file_type == "uhdm":
            uhdm_files.append(f["name"])
        else:
            unused_files.append(f)
            depfile = False

        if depfile:
            depfiles.append(f["name"])

    # Include directories are only known after the file loop above
    for include_dir in incdirs:
        vc.append("+incdir+" + include_dir)
        vc.append("-CFLAGS -I" + include_dir)

    # Add created exe/.so/.a to EDAM output files?
    self.edam = edam.copy()
    self.edam["files"] = unused_files

    if uhdm_files:
        vc.append("--uhdm-ast-sv")

    vc += vlt_files + uhdm_files + vlog_files
    vc.append(f"--top-module {self.toplevel}\n")

    if str(self.tool_options.get("exe")).lower() != "false":
        vc.append("--exe")

    vc += opt_c_files

    for k, v in self.vlogparam.items():
        vc.append(
            "-G{}={}".format(k, self._param_value_str(v, str_quote_style='\\"'))
        )
    for k, v in self.vlogdefine.items():
        vc.append("-D{}={}\n".format(k, self._param_value_str(v)))

    with open(os.path.join(self.work_root, verilator_file), "w") as ffile:
        ffile.write("\n".join(vc) + "\n")

    mk_file = f"V{self.toplevel}.mk"
    exe_file = f"V{self.toplevel}"

    commands = EdaCommands()
    commands.add(
        ["verilator", "-f", verilator_file],
        [mk_file],
        depfiles,
    )
    if mode == "lint-only":
        self.default_target = mk_file
    else:
        commands.add(
            ["make", "-f", mk_file] + self.tool_options.get("make_options", []),
            [exe_file],
            [mk_file],
        )
        self.default_target = exe_file
    self.commands = commands.commands
class Edaflow(object):
    @classmethod
    def get_flow_options(cls):
        flow_opts = cls.FLOW_OPTIONS.copy()
        for tool in cls.FLOW:
            (tool_name, next_nodes, flow_defined_tool_options) = tool

            # Get available tool options from each tool in the flow
            class_tool_options = getattr(
                import_module(f"edalize.tools.{tool_name}"), tool_name.capitalize()
            ).get_tool_options()

            for opt_name in class_tool_options:
                # Filter out tool options that are already set by the flow
                if not opt_name in flow_defined_tool_options:
                    flow_opts[opt_name] = class_tool_options[opt_name]
                    flow_opts[opt_name]["tool"] = tool_name
        return flow_opts

    def extract_flow_options(self):
        # Extract flow options from the EDAM
        flow_options = {}
        available_flow_options = [
            k for k, v in self.get_flow_options().items() if not v.get("tool")
        ]
        edam_flow_opts = self.edam.get("flow_options", {})
        for opt_name in list(edam_flow_opts.keys()):
            if opt_name in available_flow_options:  # self.get_flow_options():
                flow_options[opt_name] = edam_flow_opts.pop(opt_name)

        return flow_options

    # Filter out tool options for each tool from self.flow_options
    def extract_tool_options(self):
        tool_options = {}
        edam_flow_opts = self.edam.get("flow_options", {})
        for (tool_name, next_nodes, flow_defined_tool_options) in self.FLOW:
            # Get the tool class
            ToolClass = getattr(
                import_module(f"edalize.tools.{tool_name}"), tool_name.capitalize()
            )

            # Inject the flow-defined tool options to the EDAM
            tool_options[tool_name] = merge_dict(
                flow_defined_tool_options, tool_options.get(tool_name, {})
            )

            # Assign the EDAM-defined tool options to the right tool
            for opt_name in list(edam_flow_opts.keys()):
                if opt_name in ToolClass.get_tool_options():
                    tool_options[tool_name] = merge_dict(
                        tool_options[tool_name],
                        {opt_name: edam_flow_opts.pop(opt_name)},
                    )

        self.edam["tool_options"] = tool_options

    def build_tool_graph(self):
        # Instantiate the tools
        nodes = {}
        for (tool_name, next_nodes, flow_defined_tool_options) in self.FLOW:
            # Instantiate the tool class
            tool_inst = getattr(
                import_module(f"edalize.tools.{tool_name}"), tool_name.capitalize()
            )()

            # FIXME: Don't like injecting stuff like this
            tool_inst.next_nodes = next_nodes
            tool_inst.work_root = self.work_root
            nodes[tool_name] = tool_inst

        for name, node in nodes.items():
            for next_node in node.next_nodes:
                # Add backwards references
                nodes[next_node].prev_nodes.add(node)
        return nodes

    def configure_tools(self, nodes):
        def merge_edam(a, b):
            # Yeah, I know. It's just a temporary hack
            return b

        unconfigured_nodes = list(nodes.values())
        while unconfigured_nodes:
            node = unconfigured_nodes.pop(0)
            input_edam = {}

            # Check all dependencies are fulfilled
            all_deps_configured = True
            for n in node.prev_nodes:
                if n.edam:
                    input_edam = merge_edam(input_edam, n.edam)
                else:
                    all_deps_configured = False

            if all_deps_configured:
                # No input_edam means this is an input to the flow that should
                # receive the external EDAM.
                if not input_edam:
                    input_edam = self.edam

                node.configure(input_edam)

                # This is an input node. Inject dependency on pre_build scripts
                if not node.prev_nodes:
                    # Inject pre-build scripts before the first command
                    # that the node executes. Note that this isn't
                    # technically correct since the first command in
                    # the list might not be the first command executed
                    node.commands[0].order_only_deps = ["pre_build"]
                self.commands.commands += node.commands

    def add_scripts(self, depends, hook_name):
        last_script = depends
        for script in self.hooks.get(hook_name, []):
            # _env = self.env.copy()
            # if 'env' in script:
            #     _env.update(script['env'])

            targets = script["name"]
            command = script["cmd"]
            # FIXME : Add env vars
            self.commands.add(command, [targets], [last_script])
            last_script = script["name"]

        self.commands.add([], [hook_name], [last_script])

    def __init__(self, edam, work_root, verbose=False):
        self.edam = edam
        self.hooks = edam.get("hooks", {})

        # Extract all options that affect the flow rather than
        # just a single tool
        self.flow_options = self.extract_flow_options()

        # Rearrange tool_options so that each tool gets their
        # own tool_options
        self.extract_tool_options()

        self.work_root = work_root
        # Store verbosity; _run_tool uses it to decide whether to capture output
        self.verbose = verbose

        self.stdout = None
        self.stderr = None
        self.commands = EdaCommands()

    def set_run_command(self):
        self.commands.add([], ["run"], ["pre_run"])

    def configure(self):
        # Add pre build hooks
        self.add_scripts("", "pre_build")

        # Instantiate all tools (nodes) and build a DAG of the flow
        nodes = self.build_tool_graph()

        # Configure the individual tools in the graph
        self.configure_tools(nodes)

        # Add post_build scripts to the end of the build chain
        self.add_scripts(self.commands.default_target, "post_build")
        self.commands.set_default_target("post_build")

        # Add commands to be executed during the run phase
        self.add_scripts("", "pre_run")
        self.set_run_command()
        self.add_scripts("run", "post_run")

        # Write out execution file
        self.commands.write(os.path.join(self.work_root, "Makefile"))

    def _run_tool(self, cmd, args=[], cwd=None, quiet=False):
        logger.debug("Running " + cmd)
        logger.debug("args : " + " ".join(args))

        capture_output = quiet and not (self.verbose or self.stdout or self.stderr)
        try:
            cp = run(
                [cmd] + args,
                cwd=cwd,
                stdin=subprocess.PIPE,
                stdout=self.stdout,
                stderr=self.stderr,
                capture_output=capture_output,
                check=True,
            )
        except FileNotFoundError:
            _s = "Command '{}' not found. Make sure it is in $PATH".format(cmd)
            raise RuntimeError(_s)
        except subprocess.CalledProcessError as e:
            _s = "'{}' exited with an error: {}".format(e.cmd, e.returncode)
            logger.debug(_s)

            if e.stdout:
                logger.info(e.stdout.decode())
            if e.stderr:
                logger.error(e.stderr.decode())
                logger.debug("=== STDERR ===")
                logger.debug(e.stderr)

            raise RuntimeError(_s)
        return cp.returncode, cp.stdout, cp.stderr

    def build(self):
        # FIXME: Get run command (e.g. make, ninja, cloud thingie..) from self.commands
        self._run_tool("make", cwd=self.work_root)

    # Most flows won't have a run phase
    def run(self, args):
        pass
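# A minimal sketch (not part of the original sources) of how a concrete flow
# could build on Edaflow: FLOW pairs each tool with its successor nodes and any
# flow-defined tool options, and a runner then calls configure() and build().
# The flow name "Lint", the chosen tool graph and the EDAM contents shown in
# the commented-out usage below are illustrative assumptions, not the real
# edalize flow definitions.


class Lint(Edaflow):
    """Hypothetical two-stage flow: sv2v converts SystemVerilog, verilator lints it."""

    # (tool_name, next_nodes, flow_defined_tool_options)
    FLOW = [
        ("sv2v", ["verilator"], {}),
        ("verilator", [], {"mode": "lint-only"}),
    ]

    FLOW_OPTIONS = {}


# Assumed usage, mirroring how configure()/build() are defined above:
#
# edam = {
#     "name": "top",
#     "toplevel": "top",
#     "files": [{"name": "top.sv", "file_type": "systemVerilogSource"}],
#     "flow_options": {"verilator_options": ["-Wall"]},
# }
# flow = Lint(edam, work_root="build")
# flow.configure()   # builds the tool graph and writes build/Makefile
# flow.build()       # runs make in the work root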
def configure(self, edam):
    """
    Configuration is the first phase of the build.

    This writes the Makefile. It first collects the netlist and constraint
    files and then sets up the VPR build steps (pack, place, route and
    analysis).
    """
    super().configure(edam)

    src_files = []
    incdirs = set()

    file_netlist = []
    timing_constraints = []

    for f in self.files:
        file_type = f.get("file_type", "")
        if file_type in ["blif", "eblif"]:
            file_netlist.append(f["name"])
        if file_type in ["SDC"]:
            timing_constraints.append(f["name"])

    arch_xml = self.tool_options.get("arch_xml")
    if not arch_xml:
        logger.error('Missing required "arch_xml" parameter')

    _vo = self.tool_options.get("vpr_options")
    vpr_options = _vo if _vo else []

    sdc_opts = ["--sdc_file"] + timing_constraints if timing_constraints else []

    commands = EdaCommands()

    depends = self.name + ".blif"
    targets = self.name + ".net"
    command = ["vpr", arch_xml, self.name + ".blif", "--pack"]
    command += sdc_opts + vpr_options
    commands.add(command, [targets], [depends])

    depends = self.name + ".net"
    targets = self.name + ".place"
    command = ["vpr", arch_xml, self.name + ".blif", "--place"]
    command += sdc_opts + vpr_options
    commands.add(command, [targets], [depends])

    depends = self.name + ".place"
    targets = self.name + ".route"
    command = ["vpr", arch_xml, self.name + ".blif", "--route"]
    command += sdc_opts + vpr_options
    commands.add(command, [targets], [depends])

    depends = self.name + ".route"
    targets = self.name + ".analysis"
    command = ["vpr", arch_xml, self.name + ".blif", "--analysis"]
    command += sdc_opts + vpr_options
    commands.add(command, [targets], [depends])

    for ext in [".net", ".place", ".route", ".analysis"]:
        self.edam["files"].append(
            {"name": self.name + str(ext), "file_type": "vpr_" + str(ext[1:])}
        )

    self.commands = commands.commands
    commands.set_default_target(targets)
    commands.write(os.path.join(self.work_root, "Makefile"))
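# Illustrative sketch (not from the original sources) of the EDAM structure the
# configure() methods above consume. The file names, the tool options and the
# standalone instantiation below are assumptions for demonstration; in normal
# use the flow classes construct this structure and drive the tools themselves.

example_edam = {
    "name": "blinky",
    "toplevel": "blinky",
    "files": [
        {"name": "blinky.blif", "file_type": "blif"},
        {"name": "blinky.sdc", "file_type": "SDC"},
    ],
    "tool_options": {
        "vpr": {
            "arch_xml": "arch.xml",
            "vpr_options": [],
        }
    },
    "parameters": {},
}

# Assumed driving code, following the pattern in build_tool_graph()/configure_tools():
#
# vpr = Vpr()                  # tools are instantiated without arguments
# vpr.work_root = "build"      # work_root is injected by the flow
# vpr.configure(example_edam)  # populates vpr.commands and vpr.edam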