Exemplo n.º 1
0
def merge_json(
    doe_directory: PathType,
    json_version: int = 6,
) -> Dict[str, Any]:
    """Returns combined dict with several JSON files from doe_directory.

    Each ``*.json`` file found recursively is parsed once; its ``cells``
    section is merged into a single dict, and the whole parsed file is kept
    under its stem in ``does``.

    Args:
        doe_directory: directory searched recursively for ``*.json`` files.
        json_version: version tag stored in the returned metadata.

    Returns:
        dict with ``json_version``, merged ``cells`` and per-file ``does``.
    """
    logger.debug(f"Merging JSON files from {doe_directory}")
    cells: Dict[str, Any] = {}
    does: Dict[str, Any] = {}

    for filename in doe_directory.glob("**/*.json"):
        logger.debug(f"merging {filename}")
        with open(filename, "r") as f:
            data = json.load(f)
        # Tolerate files without a "cells" section instead of raising TypeError.
        cells.update(data.get("cells") or {})
        # Keep the full parsed file; avoids the second read/parse pass the
        # original did with an unclosed open().
        does[filename.stem] = data

    return dict(
        json_version=json_version,
        cells=cells,
        does=does,
    )
Exemplo n.º 2
0
def merge_yaml(
    doe_directory: PathType,
    yaml_path: Optional[PathType] = None,
    json_version: int = 6,
) -> Dict[str, Any]:
    """Combine the ``cells`` sections of several YAML files.

    Searches ``doe_directory`` recursively for ``*.yml`` files and merges
    their ``cells`` into one dict.

    Args:
        doe_directory: directory searched recursively for ``*.yml`` files.
        yaml_path: optional path; when given, the merged metadata is also
            written there as YAML.
        json_version: version tag stored in the returned metadata.

    Returns:
        dict with ``json_version`` and merged ``cells``.
    """
    logger.debug(f"Merging YAML files from {doe_directory}")
    cells: Dict[str, Any] = {}

    for filename in doe_directory.glob("**/*.yml"):
        logger.debug(f"merging {filename}")
        file_metadata = OmegaConf.to_container(OmegaConf.load(filename))
        # Tolerate files without a "cells" section instead of raising TypeError.
        cells.update(file_metadata.get("cells") or {})

    metadata = dict(
        json_version=json_version,
        cells=cells,
    )

    if yaml_path:
        yaml_path.write_text(OmegaConf.to_yaml(metadata))
        logger.info(f"Wrote metadata in {yaml_path}")
    return metadata
Exemplo n.º 3
0
def get_import_gds_script(dirpath: PathType) -> str:
    """Returns import_gds script from a directory with all the GDS files."""
    dirpath = pathlib.Path(dirpath)

    lines = [script_prefix]
    lines.append(f"gdsdir = {dirpath.absolute()!r}\n")
    lines.append(
        "import_gds = partial(gf.import_gds, gdsdir=gdsdir, decorator=add_ports)\n"
    )

    # One "name = partial(import_gds, 'file.gds')" line per GDS file, sorted.
    cell_lines = sorted(
        f"{clean_name(gdspath.stem)} = partial(import_gds, "
        f"{gdspath.stem + gdspath.suffix!r})"
        for gdspath in dirpath.glob("*.gds")
    )
    lines.extend(cell_lines)
    return "\n".join(lines)
Exemplo n.º 4
0
def merge_test_metadata(
    labels_path: PathType,
    mask_metadata: Dict[str, Any],
    labels_prefix: str = "opt",
) -> DictConfig:
    """Returns a test metadata dict config of labeled cells
    by merging GDS labels in CSV and YAML mask metadata

    Args:
        labels_path: for test labels in CSV
        mask_metadata: dict with test metadata
        labels_prefix: only select labels with a text prefix

    .. code::

        CSV labels  -------
                          |--> merge_test_metadata dict
                          |
        YAML metatada  ----


    """
    labels_path = Path(labels_path)

    if not labels_path.exists():
        raise FileNotFoundError(f"missing CSV labels {labels_path}")

    labels_list = parse_csv_data(labels_path)
    cells_metadata = mask_metadata.get("cells", {})

    test_metadata = DictConfig({})

    for label, x, y in labels_list:
        cell = get_cell_from_label(label)

        # Guard clause: labels without matching cell metadata are reported
        # and skipped rather than aborting the merge.
        if cell not in cells_metadata:
            logger.error(f"missing cell metadata for {cell}")
            warnings.warn(f"missing cell metadata for {cell}")
            continue

        test_metadata[cell] = cells_metadata[cell]
        test_metadata[cell].label = dict(x=float(x), y=float(y), text=label)

    return test_metadata
Exemplo n.º 5
0
def gdsdir(dirpath: PathType) -> Component:
    """Merges GDS cells from a directory into a single Component"""
    gds_paths = pathlib.Path(dirpath).glob("*.gds")
    return from_gdspaths(gds_paths)
Exemplo n.º 6
0
def write_sweeps(
    filepath: PathType,
    component_factory: Dict[str, Callable] = factory,
    doe_root_path: PathType = CONFIG["cache_doe_directory"],
    doe_metadata_path: PathType = CONFIG["doe_directory"],
    n_cores: int = n_cores,
    overwrite: bool = False,
    precision: float = 1e-9,
    cache: bool = False,
) -> None:
    """Generates a sweep/DOEs of components specified in a yaml file.

    Allows for each DOE to have its own x and y spacing (more flexible than
    method1); similar to write_doe. Each DOE that is not already cached is
    built in its own subprocess, with at most ``n_cores`` running at a time.

    Args:
        filepath: for the does.yml
        component_factory: maps component names to factory callables
        doe_root_path: root directory of the DOE GDS cache
        doe_metadata_path: directory where DOE metadata is written
        n_cores: number of cores (max concurrent build processes)
        overwrite: if True, rewrites metadata even for cached DOEs
        precision: for the GDS, defaults to 1nm
        cache: if True uses cache (overridable per DOE or via mask settings)
    """
    doe_root_path = pathlib.Path(doe_root_path)
    doe_metadata_path = pathlib.Path(doe_metadata_path)

    doe_root_path.mkdir(parents=True, exist_ok=True)
    doe_metadata_path.mkdir(parents=True, exist_ok=True)

    dicts, mask_settings = read_sweep(filepath)
    does, templates_by_type = separate_does_from_templates(dicts)

    dict_templates = templates_by_type.get("template", {})
    with_cache_default = mask_settings.get("cache", cache)

    # Validate and enrich every DOE before launching any build process, so
    # configuration errors surface immediately.
    list_args = []
    for doe_name, doe in does.items():
        doe["name"] = doe_name
        component = doe["component"]

        if component not in component_factory:
            raise ValueError(
                f"{component!r} not in {component_factory.keys()}")

        if "template" in doe:
            # The keyword template is used to enrich the dictionary from the template
            templates = doe["template"]
            if not isinstance(templates, list):
                templates = [templates]
            for template in templates:
                try:
                    doe = update_dicts_recurse(doe, dict_templates[template])
                except Exception:
                    print(template, "does not exist")
                    raise

        # NOTE(review): raises KeyError when "do_permutation" is missing —
        # presumably read_sweep always injects it; confirm before adding a default.
        do_permutation = doe.pop("do_permutation")
        settings = doe["settings"]
        doe["list_settings"] = get_settings_list(do_permutation, **settings)

        list_args += [doe]

    does_running = []
    start_times = {}
    finish_times = {}
    doe_name_to_process = {}
    while list_args:
        # Launch builds until all cores are busy or no DOEs remain.
        while len(does_running) < n_cores:
            if not list_args:
                break
            doe = list_args.pop()
            doe_name = doe["name"]

            # Only launch a build process if we do not use the cache
            # Or if the DOE is not built
            list_settings = doe["list_settings"]

            # Per-DOE cache flag overrides the mask-level/function default.
            with_cache = doe.get("cache", with_cache_default)

            _doe_exists = False

            if "doe_template" in doe:
                # this DOE points to another existing component
                _doe_exists = True
                logger.info(f"Using template - {doe_name!r}")
                save_doe_use_template(doe)

            elif with_cache:
                _doe_exists = doe_exists(
                    doe_name=doe_name,
                    list_settings=list_settings,
                    doe_root_path=doe_root_path,
                )
                if _doe_exists:
                    # Bug fix: original lacked the f-prefix and logged the
                    # literal text "{doe_name!r}".
                    logger.info(f"Cached - {doe_name!r}")
                    if overwrite:
                        component_names = load_doe_component_names(doe_name)

                        write_sweep_metadata(
                            doe_name=doe["name"],
                            cell_names=component_names,
                            list_settings=doe["list_settings"],
                            doe_metadata_path=doe_metadata_path,
                        )

            if not _doe_exists:
                start_times[doe_name] = time.time()
                p = Process(
                    target=write_sweep,
                    args=(doe, component_factory),
                    kwargs={
                        "doe_root_path": doe_root_path,
                        "doe_metadata_path": doe_metadata_path,
                        "overwrite": overwrite,
                        "precision": precision,
                    },
                )
                doe_name_to_process[doe_name] = p
                does_running += [doe_name]
                try:
                    p.start()
                except Exception:
                    print(f"Issue starting process for {doe_name}")
                    print(type(component_factory))
                    raise

        # Reap finished processes and log how long each DOE took.
        to_rm = []
        for i, doe_name in enumerate(does_running):
            _p = doe_name_to_process[doe_name]
            if not _p.is_alive():
                to_rm += [i]
                finish_times[doe_name] = time.time()
                dt = finish_times[doe_name] - start_times[doe_name]
                logger.info(f"Done - {doe_name} ({dt:.1f}s)")

        # Remove from the back so earlier indices stay valid.
        for i in to_rm[::-1]:
            does_running.pop(i)

        time.sleep(0.001)

    # All DOEs have been launched; wait for the remaining processes to finish.
    while does_running:
        to_rm = []
        for i, _doe_name in enumerate(does_running):
            _p = doe_name_to_process[_doe_name]
            if not _p.is_alive():
                to_rm += [i]
        for i in to_rm[::-1]:
            does_running.pop(i)

        time.sleep(0.05)