Example #1
def merge_yaml(
    doe_directory: PathType,
    yaml_path: Optional[PathType] = None,
    json_version: int = 6,
) -> Dict[str, Any]:
    """Combine several YAML files

    in the root of the mask directory, gets mask_name from there

    Args:
        doe_directory: defaults to current working directory
        extra_directories: list of extra_directories
        yaml_path: optional metadata path to write metadata
        json_version:

    """
    logger.debug(f"Merging JSON files from {doe_directory}")
    cells = {}

    for filename in doe_directory.glob("**/*.yml"):
        logger.debug(f"merging {filename}")
        metadata = OmegaConf.load(filename)
        metadata = OmegaConf.to_container(metadata)
        cells.update(metadata.get("cells", {}))  # tolerate files without a cells section

    metadata = dict(
        json_version=json_version,
        cells=cells,
    )

    if yaml_path:
        yaml_path.write_text(OmegaConf.to_yaml(metadata))
        logger.info(f"Wrote metadata in {yaml_path}")
    return metadata
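
A minimal usage sketch for merge_yaml, assuming a hypothetical build/doe directory and that PathType accepts a pathlib.Path:

from pathlib import Path

# Merge every *.yml found under build/doe (hypothetical path) and
# write the combined metadata next to it.
metadata = merge_yaml(
    doe_directory=Path("build/doe"),
    yaml_path=Path("build/metadata.yml"),
)
print(sorted(metadata["cells"]))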
Example #2
def build_devices(regex=".*", overwrite=True):
    """Builds all the python files in devices/"""
    # Avoid accidentally rebuilding devices
    if (os.path.isdir(CONFIG["gds_directory"])
            and os.listdir(CONFIG["gds_directory"]) and not overwrite):
        print("Run `make clean` to remove already built devices.")
        sys.exit(0)

    # Collect all the files to run.
    all_files = [
        os.path.join(dp, f)
        for dp, dn, filenames in os.walk(CONFIG["devices_directory"])
        for f in filenames if os.path.splitext(f)[1] == ".py"
    ]
    all_files = sorted(all_files)
    all_files = [f for f in all_files if re.search(regex, f)]

    # Notify user
    logger.info("Building splits on {} threads. {} files to run.".format(
        multiprocessing.cpu_count(), len(all_files)))
    logger.info("Debug information at {}".format(
        os.path.relpath(os.path.join(CONFIG["log_directory"], "debug.log"))))

    # Now run all the files across cpu_count() worker processes.
    with Pool(processes=multiprocessing.cpu_count()) as pool:
        for filename, rc in pool.imap_unordered(run_python, all_files):
            logger.debug("Finished {} {}".format(filename, rc))

    # Report on what we did.
    devices = glob(os.path.join(CONFIG["gds_directory"], "*.gds"))
    countmsg = "There are now {} GDS files in {}.".format(
        len(devices), os.path.relpath(CONFIG["gds_directory"]))
    logger.info(f"Finished building devices. {countmsg}")
Example #3
def merge_json(
    doe_directory: PathType,
    json_version: int = 6,
) -> Dict[str, Any]:
    """Returns combined dict with several JSON files from doe_directory

    Args:
        doe_directory: defaults to current working directory
        json_version:

    """
    logger.debug(f"Merging JSON files from {doe_directory}")
    cells = {}

    for filename in doe_directory.glob("**/*.json"):
        logger.debug(f"merging {filename}")
        with open(filename, "r") as f:
            data = json.load(f)
            cells.update(data.get("cells", {}))  # tolerate files without a cells section

    does = {d.stem: json.loads(d.read_text()) for d in doe_directory.glob("**/*.json")}
    metadata = dict(
        json_version=json_version,
        cells=cells,
        does=does,
    )
    return metadata
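
A short usage sketch, again with a hypothetical directory; the returned dict carries the merged cells plus one does entry per JSON file:

from pathlib import Path

metadata = merge_json(doe_directory=Path("build/doe"))  # hypothetical path
print(metadata["json_version"], len(metadata["cells"]), len(metadata["does"]))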
Example #4
def merge_json(
    doe_directory: Path = CONFIG["doe_directory"],
    gds_directory: Path = CONFIG["gds_directory"],
    extra_directories: Optional[Iterable[Path]] = None,
    jsonpath: Path = CONFIG["mask_directory"] / "metadata.json",
    json_version: int = 6,
    config: DictConfig = TECH,
) -> Dict[str, Any]:
    """Combine several JSON files from config.yml
    in the root of the mask directory, gets mask_name from there

    Args:
        doe_directory: defaults to current working directory
        extra_directories: list of extra_directories
        jsonpath
        json_version:
        config

    """
    logger.debug("Merging JSON files:")
    cells = {}
    extra_directories = extra_directories or []
    config = dataclasses.asdict(config)
    config.pop("library", "")

    for directory in extra_directories + [doe_directory]:
        for filename in directory.glob("*/*.json"):
            logger.debug(filename)
            with open(filename, "r") as f:
                data = json.load(f)
                cells.update(data.get("cells", {}))  # tolerate files without a cells section

    does = {
        d.stem: json.loads(d.read_text())
        for d in doe_directory.glob("*.json")
    }
    metadata = dict(
        json_version=json_version,
        cells=cells,
        does=does,
        config=config,
    )

    write_config(metadata, jsonpath)
    logger.info(f"Wrote  metadata in {jsonpath}")
    return metadata
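
Because every parameter defaults to a CONFIG or TECH value, a plain call is enough; the extra directory below is hypothetical:

from pathlib import Path

# Merge with all defaults, writing metadata.json into the mask directory.
metadata = merge_json()

# Also pick up JSON files produced elsewhere (hypothetical directory).
metadata = merge_json(extra_directories=[Path("build/extra")])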
Example #5
def run_python(filename):
    """Run a python script and keep track of some context"""
    logger.debug("Running `{}`.".format(filename))
    command = ["python", filename]

    # Run the process
    t = time.time()
    process = Popen(command, stdout=PIPE, stderr=PIPE)
    stdout, _ = process.communicate()
    total_time = time.time() - t
    if process.returncode == 0:
        logger.info("v {} ({:.1f}s)".format(os.path.relpath(filename), total_time))
    else:
        logger.info(
            "! Error in {} ({:.1f}s)".format(os.path.relpath(filename), total_time)
        )
        # message = "! Error in `{}`".format(basename(filename))
        # logger.error(message, exc_info=(Exception, stderr.strip(), None))
    output = stdout.decode().strip()
    if output:
        logger.debug("Output of python {}:\n{}".format(filename, output))
    return filename, process.returncode
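
A usage sketch for run_python with a hypothetical script path; the function returns the filename together with the process return code, which is how build_devices in Example #2 consumes it through pool.imap_unordered:

# Run one device script and check whether it succeeded.
filename, returncode = run_python("devices/waveguide.py")  # hypothetical path
if returncode != 0:
    print(f"{filename} exited with return code {returncode}")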