예제 #1
0
def run(asc: str):
    """Run an LTspice simulation on the given .asc schematic.

    Selects the LTspice executable for the current platform (native on
    macOS/Windows, through wine64 on Linux), launches the run in the
    background and watches the log file to detect completion.

    Args:
        asc: path to the LTspice schematic (.asc) file.

    Returns:
        tuple(int, bool): (0, warning) where warning is True when the
        simulation did not complete before the log watcher gave up.
    """
    relog.step("Running simulation")
    os.makedirs(DEFAULT_TMPDIR, exist_ok=True)
    # the simulation log sits next to the schematic
    log = asc.replace(".asc", ".log")
    # use the appropriate program depending on the platform
    if sys.platform == "darwin":
        ltspice = "/Applications/LTspice.app/Contents/MacOS/LTspice"
    elif "linux" in sys.platform:
        # NOTE: the former `sys.platform == "unix"` test was dead code —
        # sys.platform is never "unix" (see Python docs) — and was removed
        ltspice = 'wine64 "%s"' % utils.wine.locate("XVIIx64.exe")
        # to speed up wine
        # wine reg add 'HKEY_CURRENT_USER\Software\Wine\Direct3D' /v MaxVersionGL /t REG_DWORD /d 0x30003 /f
        # winetricks orm=backbuffer glsl=disable for NVIDIA driver
        # do not allow the WM to decorate window
        # translate the unix path to its windows form for wine
        window_path = io.StringIO()
        executor.sh_exec("winepath -w '%s'" % asc,
                         window_path,
                         NOERR=True,
                         NOOUT=True)
        asc = utils.normpath(window_path.getvalue().strip())
    else:
        ltspice = "XVIIx64.exe"
    # start the simulation (generator yields the running process)
    gen = executor.ish_exec('%s -Run "%s"' % (ltspice, asc),
                            SIM_LOG,
                            MAX_TIMEOUT=300,
                            NOERR=True)
    proc = next(gen)
    # watch the log file to determine when
    # the simulation ends
    sim_done = watch_log(log)
    if proc:
        proc.kill()
    return 0, not sim_done  # relog.get_stats(SIM_LOG)
예제 #2
0
    def __new__(cls, value):
        """Create a normalized, forward-slash path string.

        Args:
            value: raw path (str or FilePath).

        Returns:
            a new instance of cls wrapping the normalized path.
        """
        value = normpath(value)
        system = platform.system()
        if system == 'Windows':
            # BUG FIX: str.lstrip(prefix) strips *any* leading characters
            # found in the prefix's character set, which can also eat
            # legitimate leading path characters. Remove the long-path
            # prefix only when the string actually starts with it.
            prefix = cls.long_path_windows_prefix
            if value.startswith(prefix):
                value = value[len(prefix):]

        if isinstance(value, FilePath):
            return str.__new__(cls, value)
        # canonicalize separators to forward slashes
        value = value.replace('\\', '/')
        return str.__new__(cls, value)
예제 #3
0
    def save_tf_export(self, session):
        """Export the current graph + variables as a TF SavedModel and
        write an io_config.json describing the input/output tensors."""
        self._prepare_export_path()

        # persist the graph and its variables under the SERVING tag
        model_builder = tf.saved_model.builder.SavedModelBuilder(
            self.config.export_path)
        model_builder.add_meta_graph_and_variables(
            session,
            [tf.saved_model.tag_constants.SERVING],
            signature_def_map=self.signature_def_map,
        )
        model_builder.save()

        # record tensor names and dtypes so consumers can bind the
        # model's I/O without inspecting the graph
        io_config = {
            'input_name': self.X.name,
            'output_name': self.model.name,
            'input_type': self.X.dtype.name,
            'output_type': self.model.dtype.name,
        }
        config_path = utils.normpath(self.config.export_path +
                                     '/io_config.json')
        tf.gfile.GFile(config_path, mode='w').write(json.dumps(io_config))
예제 #4
0
def read_batch(batch_file: str):
    """Parse a Batch.list description file into a ConfigParser.

    Args:
        batch_file: path to a Batch.list file, or to a directory that
            contains one.

    Returns:
        configparser.ConfigParser with one section per batch rule.

    Raises:
        Exception: when batch_file does not exist.
    """
    # parser for config file
    batch = configparser.ConfigParser(
        allow_no_value=True,
        strict=True,
        empty_lines_in_values=False,
        inline_comment_prefixes=("#", ";"),
    )
    # keep case of string
    batch.optionxform = str
    # override section regex: a section header ends with ':' rather than
    # being wrapped in [brackets]
    batch.SECTCRE = re.compile(r"[ \t]*(?P<header>[^:]+?)[ \t]*:")
    # check input exist
    if not os.path.exists(batch_file):
        raise Exception("%s does not exist" % batch_file)
    # get batch description file path
    if os.path.isdir(batch_file):
        filepath = utils.normpath(os.path.join(batch_file, "Batch.list"))
    else:
        filepath = batch_file
    # parse the batch file
    try:
        batch.read([filepath])
    except configparser.DuplicateSectionError as dse:
        # BUG FIX: the fragments were previously passed as a tuple, so the
        # tuple's repr got logged instead of one coherent message; join
        # them into a single string
        relog.error(
            "batch cannot accept duplicate rules\n\t"
            "consider apply a label 'folder > label [@SIM_TYPE]:' to %s" %
            dse.section +
            "\n\tor a in this format 'do SIM_TYPE on folder as label:'"
        )
    except configparser.MissingSectionHeaderError:
        # add folder of a tc in default category
        # !! should be processed in normalize!!
        # open read-only: the file is only read here ("r+" needlessly
        # required write permission)
        with open(filepath, "r") as fp:
            batch.read_string("default:\n" + fp.read())
    normalize_config(batch)
    return batch
예제 #5
0
def run(cwd,
        batch,
        sim_only: bool = False,
        cov_only: bool = False,
        lint_only: bool = False):
    """Execute every rule of a parsed batch: for each rule, create a
    working directory, generate a Sources.list when needed, and launch
    sim/cov/lint runs through the project's `run` entry point.

    Args:
        cwd: base directory that rule paths are relative to.
        batch: ConfigParser produced by read_batch().
        sim_only/cov_only/lint_only: restrict which run types execute;
            when all are False every type runs.
    """
    N = len(batch.sections())
    TMP_DIR = utils.get_tmp_folder()
    # create directory for simulation
    for k, rule in enumerate(batch):
        if batch.has_option(rule, "__path__"):
            relog.info(f"[{k}/{N}] Run simulation {rule}")
            # p: rule source dir, o: rule output dir,
            # l: generated Sources.list, b: nested Batch.list
            p = utils.normpath(os.path.join(cwd, batch.get(rule, "__path__")))
            # NOTE(review): eval() executes arbitrary text taken from the
            # batch file — only safe for trusted Batch.list inputs
            s = eval(batch.get(rule, "__sim_type__"))
            o = utils.normpath(os.path.join(TMP_DIR, rule))
            l = utils.normpath(os.path.join(o, "Sources.list"))
            b = utils.normpath(os.path.join(p, "Batch.list"))
            os.makedirs(o, exist_ok=True)
            if not os.path.exists(b):
                # create the Sources.list
                with open(l, "w+") as fp:
                    path = batch.get(rule, "__path__")
                    # climb back up from the output dir to the source dir
                    dedent = "".join(["../"] * (2 + path.count("/")))
                    fp.write("%s\n" %
                             utils.normpath(os.path.join(dedent, path)))
                    # forward non-dunder rule options as KEY=VALUE lines
                    for option in batch.options(rule):
                        if not option.startswith("__"):
                            values = batch.get(rule, option, raw=True)
                            if "[" in values:
                                # list-valued option: evaluate and join
                                values = eval(values)
                                fp.write(f"{option}={' '.join(values)}\n")
                            else:
                                fp.write(f"{option}={values}\n")
            # select which simulations should be performed
            # NOTE(review): "sim" is included unconditionally while cov/lint
            # are gated on their flags — verify this asymmetry is intended
            batch_options = [
                "sim",
                "cov" if cov_only else "",
                "lint" if lint_only else "",
            ]
            # NOTE(review): these reassignments persist across loop
            # iterations, so later rules see modified flags — confirm
            sim_only, cov_only, lint_only = (
                sim_only and not cov_only and not lint_only,
                cov_only and not sim_only and not lint_only,
                lint_only and not cov_only and not sim_only,
            )
            # no restriction requested: run everything
            if not sim_only and not cov_only and not lint_only:
                sim_only, cov_only, lint_only = True, True, True
            # run the simulations
            run_path = utils.normpath(os.getenv("REFLOW") + "/envs/bin/run")
            if os.path.exists(b):
                # nested batch: delegate to `run batch <type>` in source dir
                for batch_option in batch_options:
                    if batch_option:
                        executor.sh_exec(
                            "python3 '%s' batch %s" % (run_path, batch_option),
                            CWD=p,
                            ENV=os.environ.copy(),
                        )
            else:
                # plain rule: run each requested type in the output dir
                if sim_only and s in [SimType.SIMULATION, SimType.ALL]:
                    executor.sh_exec(
                        "python3 '%s' sim" % run_path,
                        CWD=o,
                        ENV=os.environ.copy(),
                        SHELL=True,
                    )
                if cov_only and s in [SimType.COVERAGE, SimType.ALL]:
                    executor.sh_exec(
                        "python3 '%s' cov" % run_path,
                        CWD=o,
                        ENV=os.environ.copy(),
                        SHELL=True,
                    )
                if lint_only and s in [SimType.LINT, SimType.ALL]:
                    executor.sh_exec(
                        "python3 '%s' lint" % run_path,
                        CWD=o,
                        ENV=os.environ.copy(),
                        SHELL=True,
                    )
예제 #6
0
import os
import subprocess
import sys
from itertools import chain

sys.path.append(os.environ["REFLOW"])

import common.utils as utils
import common.relog as relog
import common.executor as executor
from common.read_config import Config
from common.read_sources import resolve_includes
from common.utils.files import get_type, is_digital

# waveform dump format requested from the simulator
WAVE_FORMAT = "vcd"
# shared scratch directory for all generated artifacts
DEFAULT_TMPDIR = utils.get_tmp_folder()
# list of source files handed to the parser/simulator
SRCS = utils.normpath(os.path.join(DEFAULT_TMPDIR, "srcs.list"))
# compiled simulation executable (presumably Icarus vvp output — TODO confirm)
EXE = utils.normpath(os.path.join(DEFAULT_TMPDIR, "run.vvp"))
# log files for the parse and simulation phases
PARSER_LOG = utils.normpath(os.path.join(DEFAULT_TMPDIR, "parser.log"))
SIM_LOG = utils.normpath(os.path.join(DEFAULT_TMPDIR, "sim.log"))
# waveform output path, e.g. run.vcd
WAVE = utils.normpath(os.path.join(DEFAULT_TMPDIR, "run.%s" % WAVE_FORMAT))
# directory containing this tool's scripts
TOOLS_DIR = utils.normpath(os.path.dirname(os.path.abspath(__file__)))


def transform_flags(flags: str) -> str:
    flags = flags.strip()
    # replace any values found by the key
    matches = {
        "+define+": ["-DEFINE+", "-define+", "-D", "-d"],
        "+parameter+": ["-PARAM+", "-param+", "-P", "-p"],
    }
    for output, inputs in matches.items():
예제 #7
0
def load_raw(filename):
    """
    Parses an ascii raw data file, generates and returns a dictionary with the
    following structure:
        {
            "title": <str>,
            "date:": <str>,
            "plotname:": <str>,
            "flags:": <str>,
            "no_vars:": <str>,
            "no_points:": <str>,
            "vars": [
                { "idx": <int>, "name": <str>, "type": <str> },
                { "idx": <int>, "name": <str>, "type": <str> }
                ...
                { "idx": <int>, "name": <str>, "type": <str> }
            ]
            "values": {
                "var1": <numpy.ndarray>,
                "var2": <numpy.ndarray>,
                ...
                "varN": <numpy.ndarray>
            }
        }

        Arguments:
            :filename: path to file with raw data.
        Returns
            dict with structure described above.
    """
    filename = utils.normpath(filename)
    # when given a directory/non-raw path, pick the first .raw file found
    # (skipping operating-point .op.raw files)
    if not filename.endswith(".raw"):
        for raw in Path(filename).rglob("**/*.raw"):
            if ".op.raw" not in str(raw):
                filename = str(raw)
                break
    filename = utils.normpath(filename)
    print(filename)
    ret, header = {}, []
    mode, data, time = None, None, None
    binary_index = 0
    # read the textual header; stop at the start of the data section
    with open(filename, "rb") as f:
        for line in f:
            # BUG FIX: `line[0] == b"\x00"` compared an int with bytes and
            # was always False, so the UTF-16 branch never ran (and its old
            # body would itself have raised TypeError). Compare a 1-byte
            # slice instead and decode directly.
            if line[:1] == b"\x00":
                # UTF-16-BE encoded header line (leading NUL byte)
                sline = line.decode("utf-16-be", errors="ignore").strip()
            else:
                # ASCII-ish line, possibly with interleaved NUL padding
                sline = str(line.replace(b"\x00", b""),
                            encoding="utf-8").strip()
            if "Binary:" not in sline and "Values:" not in sline:
                header.append(sline)
            else:
                if "Values:" in sline:
                    relog.error("Ascii waveforms are not yet supported")
                binary_index = f.tell() + 1
                break
    # get simulation informations
    RE_KEY_VALUE = r"(?P<key>[A-Z][\w \.]+):\s*(?P<value>\w.*\w)"
    ret = {}
    matches = re.finditer(RE_KEY_VALUE, "\n".join(header))
    for match in matches:
        k, v = match.groups()
        if "Variables" != k:
            ret[k.lower().replace(". ", "_")] = v
    matches = re.search(r"^Variables:\s*(?P<vars>\w.*\w)",
                        "\n".join(header),
                        flags=re.MULTILINE | re.DOTALL)
    ret.update(matches.groupdict())
    if not ret:
        relog.error("No information found in raw file")
        exit(0)
    # normalize key names coming from the header
    ret["tools"] = ret.pop("command")
    ret["no_vars"] = int(ret.pop("no_variables"))
    ret["no_points"] = int(ret["no_points"])
    ret["offset"] = float(ret["offset"])

    # vars: "<idx> <name> <type>" lines
    pattern = r"\s*(?P<idx>\d+)\s+" r"(?P<name>\S+)\s+" r"(?P<type>.*)\s*"
    m_vars = re.finditer(pattern, ret["vars"])

    def transform(i):
        # convert a regex match into a dict with an int index
        d = i.groupdict()
        d["idx"] = int(d["idx"])
        return d

    ret["vars"] = sorted((transform(i) for i in m_vars),
                         key=lambda x: x["idx"])

    # determine mode from the plot name
    if "FFT" in ret["plotname"]:
        mode = "FFT"
    elif "Transient" in ret["plotname"]:
        mode = "Transient"
    elif "AC" in ret["plotname"]:
        mode = "AC"

    # parse binary section
    nb_vars = ret["no_vars"]
    nb_pts = ret["no_points"]
    data, freq, time = [], None, None

    # read number of steps in the log file
    nb_steps = 0
    with open(filename.replace(".raw", ".log"), "r+") as fp:
        for line in fp:
            if line.startswith(".step"):
                nb_steps += 1
    ret["nb_steps"] = nb_steps
    steps_indices = []

    if mode == "FFT" or mode == "AC":
        # complex data: one complex128 per variable per point
        data = np.fromfile(filename, dtype=np.complex128, offset=binary_index)
        freq = np.abs(data[::nb_vars])
        data = np.reshape(data, (nb_pts, nb_vars))
    elif mode == "Transient":
        # time is 8 bytes but is also part of variables
        # values for each variable is 4 bytes
        # so expect to have (nb_vars-1) * 4 + 8 = (nb_vars + 1) * 4
        # for each point: in total nb_pts * (nb_vars + 1) * 4
        buf_length = nb_pts * (nb_vars + 1) * 4
        # check file size to know if stepped simulation
        is_stepped = os.stat(filename).st_size > buf_length + binary_index
        print(f"stepped simulation: {nb_steps}")
        with open(filename, "rb") as fp:
            # read data
            fp.seek(binary_index)
            data = np.frombuffer(fp.read(buf_length), dtype=np.float32)
            # calculate time axis (float64 at the start of each point)
            time = []
            for i in range(nb_pts):
                fp.seek(binary_index + i * (nb_vars + 1) * 4)
                t = struct.unpack("d", fp.read(8))[0]
                time.append(t)
                # a time restarting at 0 marks a new .step sweep
                if i > 0 and t == 0:
                    steps_indices.append(i)
            steps_indices.append(nb_pts)
        # reshape data
        data = np.array(data).reshape((nb_pts, nb_vars + 1))
    # (start, end) index pairs for each sweep step
    ret["steps_idx"] = [(0, j) if i == 0 else (steps_indices[i - 1], j)
                        for i, j in enumerate(steps_indices)]
    ret["values"] = {
        ret["vars"][i - 1].get("name", ""): data[:, i]
        for i in range(2, nb_vars)
    }
    ret["freq"] = freq
    ret["time"] = time
    return ret