def generate_lib(node, *args, **kwargs):
    """
    Generate lib file(s) from the excel file describing the
    digital <-> analog interface, and register the result in the shared
    JSON database (db.json) of the tmp working folder.

    Yields:
        a deep copy of *node* for each lib file generated by libgen.
    """
    # get current working directory
    output_dir = utils.get_tmp_folder()
    # ensure the working directory exists before touching db.json
    # (add_in_database does the same; keeps both entry points consistent)
    os.makedirs(output_dir, exist_ok=True)
    # db path
    db_path = os.path.join(output_dir, "db.json")
    if os.path.exists(db_path):
        with open(db_path, "r") as fp:
            db = json.load(fp)
    else:
        db = {}
    if "libs" not in db:
        db["libs"] = {}
    # generate libs for synthesis: last TAGS entry selects the sheet,
    # default sheet is "Timing"
    sheetname = node.params.get("TAGS")[-1] if "TAGS" in node.params else "Timing"
    libs, lib = libgen.main(node.name, output_dir, sheetname)
    # register generated file: one node per lib produced
    for libnode in libs:
        n = copy.deepcopy(node)
        n.name = libnode
        yield n
    # register in database (runs once the generator is fully consumed)
    db["libs"][lib.name] = lib.to_dict()
    with open(db_path, "w+") as fp:
        fp.write(
            json.dumps(db, indent=2, sort_keys=True, default=utils.json_encoder)
        )
def generate_file(node, *args, **kwargs):
    """
    Generate a verilog file from a mako template and the shared JSON
    database (db.json) of the tmp working folder.

    Returns:
        a deep copy of *node* whose name is the rendered file
        (template name with the ".mako" suffix removed).
    """
    # get current working directory
    output_dir = utils.get_tmp_folder()
    # read dependencies; default to an empty database: previously `db` was
    # left unbound when db.json did not exist, raising NameError below
    db = {}
    db_path = os.path.join(output_dir, "db.json")
    if os.path.exists(db_path):
        with open(db_path, "r") as fp:
            db = json.load(fp)
    # deserialize stored dicts back into project objects for the template
    db["libs"] = {
        name: libgen.Lib.from_json(lib)
        for name, lib in db.get("libs", {}).items()
    }
    db["modules"] = [
        verilog.Module.from_json(m) for m in db.get("modules", {}).values()
    ]
    # generate file from the template
    _tmp = Template(filename=node.name)
    with open(
        os.path.join(output_dir, node.name.replace(".mako", "")), "w+"
    ) as fp:
        fp.write(_tmp.render_unicode(**db))
    # return the file generated from the template
    n = copy.deepcopy(node)
    n.name = node.name.replace(".mako", "")
    return n
def run_sim(files, params):
    """
    Compile the design and execute it with vvp, then collect the dump
    file into the tmp simulation folder.

    Returns:
        the statistics parsed from the simulation log.
    """
    # load tool configuration (NB: these are locals despite the original
    # "update global variables" comment — no `global` statement here)
    Config.add_configs(os.path.join(TOOLS_DIR, "tools.config"))
    DEFAULT_TMPDIR = utils.get_tmp_folder("sim")
    WAVE_FORMAT = Config.iverilog.get("format")
    WAVE = os.path.join(DEFAULT_TMPDIR, "run.%s" % WAVE_FORMAT)
    # prepare scripts and compile
    compile(*prepare(files, params))
    relog.step("Running simulation")
    # keep only the module flags (-m/-M) for the vvp runtime
    vvp_flags = ""
    if "SIM_FLAGS" in params:
        transformed = (transform_flags(f) for f in params["SIM_FLAGS"])
        vvp_flags = " ".join(
            tf for tf in transformed if tf and tf[:2] in ("-m", "-M")
        )
    executor.sh_exec(
        "vvp -i %s %s -%s" % (EXE, vvp_flags, WAVE_FORMAT),
        SIM_LOG,
        MAX_TIMEOUT=300,
        SHOW_CMD=True,
    )
    # move the dumpfile to TMPDIR
    if os.path.exists(WAVE):
        os.remove(WAVE)
    dumpfile = "./dump.%s" % WAVE_FORMAT
    if os.path.exists(dumpfile):
        os.rename(dumpfile, WAVE)
    return relog.get_stats(SIM_LOG)
def run_lint(files, params):
    """
    Compile the design in lint mode and display the parser log.

    Returns:
        the statistics parsed from the simulation log.
    """
    # load tool configuration and create the lint tmp folder
    Config.add_configs(os.path.join(TOOLS_DIR, "tools.config"))
    tmp_dir = utils.get_tmp_folder("lint")
    # prepare scripts and compile
    lint_flags = prepare(files, params)
    compile(*lint_flags)
    relog.step("Linting files")
    relog.display_log(PARSER_LOG)
    return relog.get_stats(SIM_LOG)
def run_lint(files, params):
    """
    Lint the design with Cadence xrun (-hal static checks).

    No return value: only launches the xrun process and logs its output.
    """
    # NOTE(review): the iverilog siblings call Config.add_configs (plural) —
    # confirm which spelling this Config object actually exposes.
    Config.add_config(os.path.join(TOOLS_DIR, "tools.config"))
    WAVE_FORMAT = Config.xcellium.get("format")
    DEFAULT_TMPDIR = utils.get_tmp_folder("lint")
    SRCS = os.path.join(DEFAULT_TMPDIR, "srcs.list")
    PARSER_LOG = os.path.join(DEFAULT_TMPDIR, "parser.log")
    # SIM_LOG and WAVE are computed but not referenced in this function
    SIM_LOG = os.path.join(DEFAULT_TMPDIR, "sim.log")
    WAVE = os.path.join(DEFAULT_TMPDIR, "run.%s" % WAVE_FORMAT)
    # generate scripts
    gen = prepare(files, params)
    # NOTE(review): flags come from Config.ncsim while the wave format comes
    # from Config.xcellium — presumably a shared flag set; confirm intended.
    flags = " ".join(chain([gen, "-hal"], Config.ncsim.get("flags").split()))
    # lint: -hal enables the HDL analysis/lint checks
    executor.sh_exec("xrun %s -f %s" % (flags, SRCS), PARSER_LOG, MAX_TIMEOUT=300)
def run_sim(files, params):
    """
    Launch a Cadence irun (ncsim) simulation on the prepared sources.

    No return value: only launches the irun process and logs its output.
    """
    # load tool configuration and resolve every path of the sim tmp folder
    Config.add_config(os.path.join(TOOLS_DIR, "tools.config"))
    fmt = Config.ncsim.get("format")
    tmp_dir = utils.get_tmp_folder("sim")
    srcs = os.path.join(tmp_dir, "srcs.list")
    parser_log = os.path.join(tmp_dir, "parser.log")
    sim_log = os.path.join(tmp_dir, "sim.log")
    wave = os.path.join(tmp_dir, "run.%s" % fmt)
    # generate scripts and assemble the irun flag list
    gen = prepare(files, params)
    flags = " ".join(chain([gen], Config.ncsim.get("flags").split()))
    # run simulation
    relog.step("Running simulation")
    executor.sh_exec("irun %s -f %s" % (flags, srcs), parser_log, MAX_TIMEOUT=300)
def add_in_database(node, *args, **kwargs): """ add a database list all blocks and information concerning them to populate mako template if the database does not exist, it create the database """ # get current working directory output_dir = utils.get_tmp_folder() os.makedirs(output_dir, exist_ok=True) # db path db_path = os.path.join(output_dir, "db.json") if os.path.exists(db_path): with open(db_path, "r") as fp: db = json.load(fp) else: db = {} if "includes" not in db: db["includes"] = [] if "modules" not in db: db["modules"] = {} if "timescales" not in db: db["timescales"] = [] # parse the verilog file includes = verilog.find_includes(node.name) db["includes"].extend(read_sources.resolve_includes(includes)) db["timescales"].extend(verilog.find_timescale(node.name)) for m in verilog.find_modules(node.name): module = verilog.Module(m[0]) if m[1]: module.parse_parameters(m[1]) module.parse_pins(m[2]) module.parse_parameters(m[-1]) module.parse_pins(m[-1]) for i in verilog.find_instances(node.name): if i[1]: instance = verilog.Instance(i[2], i[0]) instance.parse_parameters(i[1]) else: instance = verilog.Instance(i[2], i[0]) module.instances.append(instance) db["modules"][module.name] = module.to_dict() with open(db_path, "w+") as fp: fp.write(json.dumps(db, indent=2, sort_keys=True))
#!/usr/bin/env python3 # coding: utf-8 import io import os import sys import time import shutil sys.path.append(os.environ["REFLOW"]) import common.utils as utils import common.relog as relog import common.executor as executor DEFAULT_TMPDIR = utils.get_tmp_folder() SIM_LOG = None def is_file_timeout(file: str, start: int, max: int = 720, shall_exist: bool = True): file_exists = os.path.exists(file) return time.time() - start < max and (file_exists == shall_exist) def simulation_finished(log_file: str): with open(log_file, "r+") as fp: for line in fp: l = line.lower().replace("\x00", "")
def run(cwd, batch, sim_only: bool = False, cov_only: bool = False, lint_only: bool = False):
    """
    Execute every simulation rule described in *batch* (a configparser-like
    object): create one tmp sub-folder per rule, generate its Sources.list
    when the rule has no Batch.list of its own, then launch the reflow
    `run` entry point for each selected step (sim/cov/lint).

    Args:
        cwd: base directory the rule __path__ options are relative to.
        batch: parsed batch description (sections = rules).
        sim_only/cov_only/lint_only: restrict which steps are performed;
            when none is set, all three are enabled.
    """
    N = len(batch.sections())
    TMP_DIR = utils.get_tmp_folder()
    # create directory for simulation
    for k, rule in enumerate(batch):
        if batch.has_option(rule, "__path__"):
            relog.info(f"[{k}/{N}] Run simulation {rule}")
            p = utils.normpath(os.path.join(cwd, batch.get(rule, "__path__")))
            # NOTE(review): eval() on config-supplied text — acceptable only
            # because batch files are trusted project inputs; confirm.
            s = eval(batch.get(rule, "__sim_type__"))
            o = utils.normpath(os.path.join(TMP_DIR, rule))
            l = utils.normpath(os.path.join(o, "Sources.list"))
            b = utils.normpath(os.path.join(p, "Batch.list"))
            os.makedirs(o, exist_ok=True)
            if not os.path.exists(b):
                # create the Sources.list
                with open(l, "w+") as fp:
                    path = batch.get(rule, "__path__")
                    # climb back out of TMP_DIR/<rule> down to the rule path
                    dedent = "".join(["../"] * (2 + path.count("/")))
                    fp.write("%s\n" % utils.normpath(os.path.join(dedent, path)))
                    for option in batch.options(rule):
                        if not option.startswith("__"):
                            values = batch.get(rule, option, raw=True)
                            # "[" marks a python list literal in the config
                            if "[" in values:
                                values = eval(values)
                                fp.write(f"{option}={' '.join(values)}\n")
                            else:
                                fp.write(f"{option}={values}\n")
            # select which simulations should be performed
            # NOTE(review): "sim" is included unconditionally while cov/lint
            # are gated on their flags — looks like it should be
            # '"sim" if sim_only else ""'; confirm intended.
            batch_options = [
                "sim",
                "cov" if cov_only else "",
                "lint" if lint_only else "",
            ]
            # NOTE(review): this rebinds the function parameters inside the
            # loop, so the normalization result of rule k leaks into rule
            # k+1 — confirm intended.
            sim_only, cov_only, lint_only = (
                sim_only and not cov_only and not lint_only,
                cov_only and not sim_only and not lint_only,
                lint_only and not cov_only and not sim_only,
            )
            if not sim_only and not cov_only and not lint_only:
                sim_only, cov_only, lint_only = True, True, True
            # run the simulations through the reflow run entry point
            run_path = utils.normpath(os.getenv("REFLOW") + "/envs/bin/run")
            if os.path.exists(b):
                # rule provides its own Batch.list: recurse in batch mode
                for batch_option in batch_options:
                    if batch_option:
                        executor.sh_exec(
                            "python3 '%s' batch %s" % (run_path, batch_option),
                            CWD=p,
                            ENV=os.environ.copy(),
                        )
            else:
                if sim_only and s in [SimType.SIMULATION, SimType.ALL]:
                    executor.sh_exec(
                        "python3 '%s' sim" % run_path,
                        CWD=o,
                        ENV=os.environ.copy(),
                        SHELL=True,
                    )
                if cov_only and s in [SimType.COVERAGE, SimType.ALL]:
                    executor.sh_exec(
                        "python3 '%s' cov" % run_path,
                        CWD=o,
                        ENV=os.environ.copy(),
                        SHELL=True,
                    )
                if lint_only and s in [SimType.LINT, SimType.ALL]:
                    executor.sh_exec(
                        "python3 '%s' lint" % run_path,
                        CWD=o,
                        ENV=os.environ.copy(),
                        SHELL=True,
                    )