Example #1
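These snippets reference module-level imports and helpers that are never shown. A minimal sketch of the shared context they appear to assume (the definitions of CONFIG, conf, shorten_command, update_config_modules, and write_config below are assumptions, not original code):

import json
import logging
import multiprocessing
import os
import re
import shlex
import subprocess
import sys
import time
from glob import glob
from multiprocessing import Pool
from subprocess import PIPE, Popen, check_call

from omegaconf import OmegaConf

# CONFIG: dict-like project settings (mask, gds_directory, mask_directory,
# doe_directory, build_directory, cache_url, log_directory, devices_directory, ...)
# conf: an OmegaConf configuration loaded from config.yml
# shorten_command, update_config_modules, write_config: project helpers whose
# definitions are not part of these snippets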
def merge_markdown(config=CONFIG):
    """ Merges all individual markdown reports (.md) into a single markdown
    you can add a report:[Capacitors, Diodes...] in config.yml to define the merge order
    """
    mask_name = config["mask"]["name"]
    reports_directory = config["gds_directory"]
    report_path = config["mask_directory"] / (mask_name + ".md")

    with open(report_path, "w") as f:

        def wl(line="", eol="\n"):
            f.write(line + eol)

        doe_names_list = CONFIG.get("report")
        """ check if reports follows a particular order """
        if doe_names_list:
            for doe_name in doe_names_list:
                filename = os.path.join(reports_directory, doe_name + ".md")
                with open(filename) as infile:
                    for line in infile:
                        f.write(line)

        else:
            reports = sorted(glob(os.path.join(reports_directory, "*.md")))
            for filename in reports:
                with open(filename) as infile:
                    for line in infile:
                        f.write(line)

    logging.info("Wrote {}".format(os.path.relpath(report_path)))
Example #2
def merge_markdown(
    reports_directory=CONFIG["doe_directory"],
    mdpath=CONFIG["mask_directory"] / "report.md",
    **kwargs,
):
    """Merges all individual markdown reports (.md) into a single markdown
    you can add a report:[Capacitors, Diodes...] in config.yml to define the merge order
    """
    logging.debug("Merging Markdown files:")
    configpath = mdpath.with_suffix(".yml")

    with open(configpath, "w") as f:
        conf.update(**kwargs)
        f.write(OmegaConf.to_yaml(conf))

    with open(mdpath, "w") as f:

        def wl(line="", eol="\n"):
            f.write(line + eol)

        reports = sorted(glob(os.path.join(reports_directory, "*.md")))
        for filename in reports:
            with open(filename) as infile:
                for line in infile:
                    f.write(line)

    logging.info(f"Wrote {mdpath}")
    logging.info(f"Wrote {configpath}")
Example #3
def build_cache_pull():
    """ Pull devices from the cache """
    if CONFIG.get("cache_url"):
        logging.info("Loading devices from cache...")
        check_call([
            "rsync",
            "-rv",
            "--delete",
            CONFIG["cache_url"],
            CONFIG["build_directory"] + "/",
        ])
Example #4
def run_command(command):
    """ Run a command and keep track of some context """
    logging.info("Running `{}`".format(command))

    # Run the process and handle errors
    time0 = time.time()
    process = subprocess.Popen(shlex.split(command),
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               universal_newlines=True)  # decode output as text so it can be logged and concatenated
    stdout, stderr = process.communicate()
    total_time = time.time() - time0

    # Either show that there was an error, or just leave it
    if process.returncode == 0:
        message = "`{}` ran without errors in {:.2f}s.".format(
            shorten_command(command), total_time)
        logging.info(message)
        if stdout.strip():
            message = "Output of `{}`:".format(shorten_command(command))
            logging.info(message)
            logging.info(stdout.strip(), extra={"raw": True})
    else:
        message = "Error in `{}`".format(shorten_command(command))
        logging.error(message)
        raw = stdout.strip() + "\n" + stderr.strip()
        logging.error(raw, extra={"raw": True})

    return command, process.returncode
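A minimal usage sketch (the command string is illustrative):

command, returncode = run_command("klayout -v")  # hypothetical command
if returncode != 0:
    logging.error("`%s` exited with code %d", command, returncode)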
Example #5
def build_cache_push():
    """ Push devices to the cache """
    if not os.listdir(CONFIG["build_directory"]):
        logging.info("Nothing to push")
        return

    if CONFIG.get("cache_url"):
        logging.info("Uploading devices to cache...")
        check_call([
            "rsync",
            "-rv",
            CONFIG["build_directory"] + "/",
            CONFIG["cache_url"],
            "--delete",
        ])
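Both cache helpers (Examples #3 and #5) assume CONFIG carries an rsync-compatible cache_url and a local build_directory; hypothetical values:

CONFIG = {
    "cache_url": "user@cache-host:/srv/device-cache/",  # hypothetical rsync remote
    "build_directory": "build",  # local directory synced to/from the cache
}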
Example #6
def merge_json(
    doe_directory=CONFIG["doe_directory"],
    extra_directories=[CONFIG["gds_directory"]],
    jsonpath=CONFIG["mask_directory"] / "metadata.json",
    json_version=6,
    config=conf,
):
    """ Merge several JSON files from config.yml
    in the root of the mask directory, gets mask_name from there

    Args:
        mask_config_directory: defaults to current working directory
        json_version:

    """
    logging.debug("Merging JSON files:")
    cells = {}
    config = config or OmegaConf.create()  # keep an OmegaConf container so to_container() works
    update_config_modules(config=config)

    for directory in extra_directories + [doe_directory]:
        for filename in directory.glob("*/*.json"):
            logging.debug(filename)
            with open(filename, "r") as f:
                data = json.load(f)
                cells.update(data.get("cells", {}))  # tolerate files without a "cells" key

    does = {d.stem: json.loads(d.read_text()) for d in doe_directory.glob("*.json")}
    metadata = dict(
        json_version=json_version,
        cells=cells,
        does=does,
        config=OmegaConf.to_container(config),
    )

    write_config(metadata, jsonpath)
    print(f"Wrote  metadata in {jsonpath}")
    logging.info(f"Wrote  metadata in {jsonpath}")
    return metadata
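The merged metadata then has this shape (cell and DOE names are illustrative):

metadata = {
    "json_version": 6,
    "cells": {"mmi1x2": {"width": 0.5}},  # merged from the per-cell JSON files
    "does": {"Capacitors": {"cells": []}},  # one entry per JSON file in doe_directory
    "config": {},  # the OmegaConf config as a plain container
}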
Example #7
def build_devices(regex=".*", overwrite=True):
    """ Builds all the python files in devices/ """
    # Avoid accidentally rebuilding devices
    if (os.path.isdir(CONFIG["gds_directory"])
            and os.listdir(CONFIG["gds_directory"]) and not overwrite):
        print("Run `make clean` to remove already built devices.")
        sys.exit(0)

    # Collect all the files to run.
    all_files = [
        os.path.join(dp, f)
        for dp, dn, filenames in os.walk(CONFIG["devices_directory"])
        for f in filenames if os.path.splitext(f)[1] == ".py"
    ]
    all_files = sorted(all_files)
    all_files = [f for f in all_files if re.search(regex, f)]

    # Notify user
    logging.info("Building splits on {} threads. {} files to run.".format(
        multiprocessing.cpu_count(), len(all_files)))
    logging.info("Debug information at {}".format(
        os.path.relpath(os.path.join(CONFIG["log_directory"], "debug.log"))))

    # Now run all the files in batches of $CPU_SIZE.
    with Pool(processes=multiprocessing.cpu_count()) as pool:
        for filename, rc in pool.imap_unordered(run_python, all_files):
            logging.debug("Finished {} {}".format(filename, rc))

    # Report on what we did.
    devices = glob(os.path.join(CONFIG["gds_directory"], "*.gds"))
    countmsg = "There are now {} GDS files in {}.".format(
        len(devices), os.path.relpath(CONFIG["gds_directory"]))
    logging.info("Finished building devices. {}".format(countmsg))
Example #8
def run_python(filename):
    """ Run a python script and keep track of some context """
    logging.debug("Running `{}`.".format(filename))
    command = ["python", filename]

    # Run the process
    t = time.time()
    process = Popen(command, stdout=PIPE, stderr=PIPE)
    stdout, _ = process.communicate()
    total_time = time.time() - t
    if process.returncode == 0:
        logging.info("v {} ({:.1f}s)".format(os.path.relpath(filename),
                                             total_time))
    else:
        logging.info("! Error in {} {:.1f}s)".format(os.path.relpath(filename),
                                                     total_time))
        # message = "! Error in `{}`".format(basename(filename))
        # logging.error(message, exc_info=(Exception, stderr.strip(), None))
    output = stdout.decode().strip()
    if output:
        logging.debug("Output of python {}:\n{}".format(filename, output))
    return filename, process.returncode
Example #9
def merge_json(config=CONFIG, json_version=6):
    """ Merge several JSON files from mask_config_directory
    requires a config.yml in the root of the mask directory

    Args:
        mask_config_directory: defaults to current working directory
        json_version: for maskhub parser
        jsons_filepaths: if we want to supply individual json files
        
    """
    if config.get("mask") is None:
        raise ValueError(f"mask config missing from {config['cwd']}")

    config = update_config_modules(config)

    mask_name = config["mask"]["name"]
    jsons_directory = config["gds_directory"]
    json_out_path = config["mask_directory"] / (mask_name + ".json")

    cells = {}
    does = {}
    logging.debug("Merging JSON files:")

    for filename in jsons_directory.glob("*.json"):
        logging.debug(filename)
        with open(filename, "r") as f:
            data = json.load(f)
            if data.get("type") == "doe":
                does[data["name"]] = data
            else:
                cells.update(data.get("cells", {}))  # tolerate files without a "cells" key

    config.update({"json_version": json_version, "cells": cells, "does": does})
    write_config(config, json_out_path)
    logging.info("Wrote {}".format(os.path.relpath(json_out_path)))
    return config
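A minimal usage sketch; the returned config carries the merged cells and does:

config = merge_json()
logging.info("Merged %d cells and %d DOEs", len(config["cells"]), len(config["does"]))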