Example #1
import logging
import multiprocessing
import os
import re
import sys
from glob import glob
from multiprocessing import Pool

# CONFIG and run_python come from the surrounding module (run_python is shown in Example #4).
def build_devices(regex=".*", overwrite=True):
    """ Builds all the python files in devices/ """
    # Avoid accidentally rebuilding devices
    if (os.path.isdir(CONFIG["gds_directory"])
            and os.listdir(CONFIG["gds_directory"]) and not overwrite):
        print("Run `make clean` to remove already built devices.")
        sys.exit(0)

    # Collect all the files to run.
    all_files = [
        os.path.join(dp, f)
        for dp, dn, filenames in os.walk(CONFIG["devices_directory"])
        for f in filenames if os.path.splitext(f)[1] == ".py"
    ]
    all_files = sorted(all_files)
    all_files = [f for f in all_files if re.search(regex, f)]

    # Notify user
    logging.info("Building splits on {} threads. {} files to run.".format(
        multiprocessing.cpu_count(), len(all_files)))
    logging.info("Debug information at {}".format(
        os.path.relpath(os.path.join(CONFIG["log_directory"], "debug.log"))))

    # Now run all the files, one worker process per CPU.
    with Pool(processes=multiprocessing.cpu_count()) as pool:
        for filename, rc in pool.imap_unordered(run_python, all_files):
            logging.debug("Finished {} {}".format(filename, rc))

    # Report on what we did.
    devices = glob(os.path.join(CONFIG["gds_directory"], "*.gds"))
    countmsg = "There are now {} GDS files in {}.".format(
        len(devices), os.path.relpath(CONFIG["gds_directory"]))
    logging.info("Finished building devices. {}".format(countmsg))
Example #2
# CONFIG and conf (an OmegaConf configuration) come from the surrounding module.
def merge_markdown(
    reports_directory=CONFIG["doe_directory"],
    mdpath=CONFIG["mask_directory"] / "report.md",
    **kwargs,
):
    """Merges all individual markdown reports (.md) into a single markdown
    you can add a report:[Capacitors, Diodes...] in config.yml to define the merge order
    """
    logging.debug("Merging Markdown files:")
    configpath = mdpath.with_suffix(".yml")

    with open(configpath, "w") as f:
        conf.update(**kwargs)
        f.write(OmegaConf.to_yaml(conf))

    with open(mdpath, "w") as f:

        def wl(line="", eol="\n"):
            f.write(line + eol)

        reports = sorted(glob(os.path.join(reports_directory, "*.md")))
        for filename in reports:
            with open(filename) as infile:
                for line in infile:
                    f.write(line)

    logging.info(f"Wrote {mdpath}")
    logging.info(f"Wrote {configpath}")
Example #3
def merge_json(
    doe_directory=CONFIG["doe_directory"],
    extra_directories=[CONFIG["gds_directory"]],
    jsonpath=CONFIG["mask_directory"] / "metadata.json",
    json_version=6,
    config=conf,
):
    """ Merge several JSON files from config.yml
    in the root of the mask directory, gets mask_name from there

    Args:
        mask_config_directory: defaults to current working directory
        json_version:

    """
    logging.debug("Merging JSON files:")
    cells = {}
    config = config or {}
    update_config_modules(config=config)

    for directory in extra_directories + [doe_directory]:
        for filename in directory.glob("*/*.json"):
            logging.debug(filename)
            with open(filename, "r") as f:
                data = json.load(f)
                cells.update(data.get("cells"))

    does = {
        d.stem: json.loads(d.read_text())
        for d in doe_directory.glob("*.json")
    }
    metadata = dict(
        json_version=json_version,
        cells=cells,
        does=does,
        config=OmegaConf.to_container(config),
    )

    write_config(metadata, jsonpath)
    print(f"Wrote  metadata in {jsonpath}")
    logging.info(f"Wrote  metadata in {jsonpath}")
    return metadata
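
A minimal usage sketch, assuming the default CONFIG paths used above:

metadata = merge_json()
print(metadata["json_version"], len(metadata["cells"]), len(metadata["does"]))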
Example #4
import logging
import os
import time
from subprocess import PIPE, Popen

def run_python(filename):
    """ Run a python script and keep track of some context """
    logging.debug("Running `{}`.".format(filename))
    command = ["python", filename]

    # Run the process
    t = time.time()
    process = Popen(command, stdout=PIPE, stderr=PIPE)
    stdout, _ = process.communicate()
    total_time = time.time() - t
    if process.returncode == 0:
        logging.info("v {} ({:.1f}s)".format(os.path.relpath(filename),
                                             total_time))
    else:
        logging.info("! Error in {} {:.1f}s)".format(os.path.relpath(filename),
                                                     total_time))
        # message = "! Error in `{}`".format(basename(filename))
        # logging.error(message, exc_info=(Exception, stderr.strip(), None))
    output = stdout.decode().strip()
    if output:
        logging.debug("Output of python {}:\n{}".format(filename, output))
    return filename, process.returncode
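
run_python is designed to be mapped over files by build_devices (Example #1), but it can also be called directly. A minimal sketch with a hypothetical script path:

filename, returncode = run_python("devices/waveguide.py")
if returncode != 0:
    print(f"{filename} failed with exit code {returncode}")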
Example #5
def merge_json(config=CONFIG, json_version=6):
    """ Merge several JSON files from mask_config_directory
    requires a config.yml in the root of the mask directory

    Args:
        mask_config_directory: defaults to current working directory
        json_version: for maskhub parser
        jsons_filepaths: if we want to supply individual json files
        
    """
    if config.get("mask") is None:
        raise ValueError(f"mask config missing from {config['cwd']}")

    config = update_config_modules(config)

    mask_name = config["mask"]["name"]
    jsons_directory = config["gds_directory"]
    json_out_path = config["mask_directory"] / (mask_name + ".json")

    cells = {}
    does = {}
    logging.debug("Merging JSON files:")

    for filename in jsons_directory.glob("*.json"):
        logging.debug(filename)
        with open(filename, "r") as f:
            data = json.load(f)
            if data.get("type") == "doe":
                does[data["name"]] = data
            else:
                cells.update(data.get("cells"))

    config.update({"json_version": json_version, "cells": cells, "does": does})
    write_config(config, json_out_path)
    logging.info("Wrote {}".format(os.path.relpath(json_out_path)))
    return config
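
A minimal usage sketch, assuming a config.yml with a mask section has already been loaded into the module-level CONFIG:

merged = merge_json(config=CONFIG, json_version=6)
print(merged["mask"]["name"], len(merged["cells"]))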
Example #6
import logging

import xlrd  # note: xlrd >= 2.0 reads only legacy .xls workbooks

# _clean_value is a small helper from the surrounding module.
def parse_device_manifest(
    filepath,
    sheet_name_prefix="Device Manifest",
    row_indices=[0, 1, 2, 3],
    key_field_id=2,
):
    """
    Open device manifest file
    Returns {device_id: {attribute: value}}
    """
    book = xlrd.open_workbook(filepath)

    row_indices_attributes = row_indices[:]
    try:
        row_indices_attributes.remove(key_field_id)
    except ValueError:
        print(row_indices_attributes)
        raise

    # Find all the device manifest sheets
    device_manifest_sheet_names = [
        s for s in book.sheet_names() if s.startswith(sheet_name_prefix)
    ]

    devices_dict = {}

    index_of_key_field_id = row_indices.index(key_field_id)

    for sheet_name in device_manifest_sheet_names:
        # print(sheet_name)
        device_manifest = book.sheet_by_name(sheet_name)

        rows_gen = device_manifest.get_rows()
        rows = []
        first_row = next(rows_gen)
        col_names = [
            first_row[i].value.replace(" ", "_")
            for i in row_indices_attributes
        ]
        logging.debug("Sheet %s columns: %s", sheet_name, col_names)

        for i in range(1000):
            try:
                _row = next(rows_gen)
            except StopIteration:
                break
            if _row[0].value:
                rows += [[_clean_value(_row[j].value) for j in row_indices]]
                logging.debug(rows[-1][index_of_key_field_id])

        _devices_dict = {}
        for row in rows:
            device_id = row.pop(index_of_key_field_id)
            _devices_dict[device_id] = {
                _c: _v
                for _c, _v in zip(col_names, row)
            }

        devices_dict.update(_devices_dict)
    return devices_dict
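
A minimal usage sketch (hypothetical .xls path; sheet names are assumed to start with "Device Manifest"):

devices = parse_device_manifest("mask_manifest.xls")
for device_id, attributes in devices.items():
    print(device_id, attributes)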