Example #1
def write_cells_recursively(
    cell: gdspy.Cell,
    unit: float = 1e-6,
    precision: float = 1e-9,
    timestamp: Optional[datetime.datetime] = _timestamp2019,
    dirpath: Optional[pathlib.Path] = None,
):
    """Write gdspy cells recursively

    Args:
        cell: gdspy cell
        unit: unit size for objects in library. 1um by default.
        precision: for object dimensions in the library (m). 1nm by default.
        timestamp: Defaults to 2019-10-25. If None uses current time.
        dirpath: directory for the GDS file
    """
    dirpath = dirpath or pathlib.Path.cwd()

    for dependency in cell.get_dependencies():
        gdspath = f"{pathlib.Path(dirpath)/dependency.name}.gds"
        lib = gdspy.GdsLibrary(unit=unit, precision=precision)
        lib.write_gds(gdspath, cells=[dependency], timestamp=timestamp)
        logger.info(f"Write GDS to {gdspath}")

        if dependency.get_dependencies():
            write_cells_recursively(
                cell=dependency,
                unit=unit,
                precision=precision,
                timestamp=timestamp,
                dirpath=dirpath,
            )
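
A minimal usage sketch, assuming the imports above are in scope; "demo.gds" and the output directory are hypothetical:

lib = gdspy.GdsLibrary()
lib.read_gds("demo.gds")  # hypothetical input file
top = lib.top_level()[0]

out = pathlib.Path("gds_cells")
out.mkdir(parents=True, exist_ok=True)  # the function assumes dirpath exists
write_cells_recursively(cell=top, dirpath=out)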
Example #2
def merge_yaml(
    doe_directory: PathType,
    yaml_path: Optional[PathType] = None,
    json_version: int = 6,
) -> Dict[str, Any]:
    """Combine several YAML files

    in the root of the mask directory, gets mask_name from there

    Args:
        doe_directory: defaults to current working directory
        extra_directories: list of extra_directories
        yaml_path: optional metadata path to write metadata
        json_version:

    """
    logger.debug(f"Merging JSON files from {doe_directory}")
    cells = {}

    for filename in doe_directory.glob("**/*.yml"):
        logger.debug(f"merging {filename}")
        metadata = OmegaConf.load(filename)
        metadata = OmegaConf.to_container(metadata)
        cells.update(metadata.get("cells", {}))

    metadata = dict(
        json_version=json_version,
        cells=cells,
    )

    if yaml_path:
        yaml_path.write_text(OmegaConf.to_yaml(metadata))
        logger.info(f"Wrote metadata in {yaml_path}")
    return metadata
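
A hedged usage sketch; the directory and output path below are hypothetical:

doe_dir = pathlib.Path("build/doe")  # hypothetical directory containing .yml files
metadata = merge_yaml(
    doe_directory=doe_dir,
    yaml_path=doe_dir / "metadata.yml",  # merged metadata is also written here
)
print(metadata["json_version"], len(metadata["cells"]))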
Example #3
def test_autoplacer():
    shutil.rmtree(build_path, ignore_errors=True)
    mask_path.mkdir(parents=True, exist_ok=True)

    # Map the component library names in the YAML file to the component library
    name2factory = {"spiral": spiral}

    logger.add(sink=logpath)
    logger.info("writring does to", doe_root_path)
    generate_does(
        str(config_yml),
        component_factory=name2factory,
        doe_root_path=doe_root_path,
        doe_metadata_path=doe_metadata_path,
    )
    top_level = place_from_yaml(config_yml, doe_root_path)
    top_level.write(str(gdspath))

    merge_metadata(gdspath=gdspath)

    assert gdspath.exists()
    assert markdown_path.exists()
    assert json_path.exists()
    assert test_metadata_path.exists()

    report = markdown_path.read_text()
    assert report.count("#") >= 1, f" only {report.count('#')} DOEs in {markdown_path}"
    return gdspath
Example #4
def write_drc_deck_macro(
    name="generic",
    filepath: Optional[PathType] = None,
    shortcut: str = "Ctrl+Shift+D",
    **kwargs,
) -> str:
    """Write script for klayout rule deck

    Args:
        name: DRC rule deck name
        filepath: optional macro path (defaults to .klayout/drc/name.lydrc)
        shortcut: keyboard shortcut that triggers the macro in KLayout

    Keyword Args:
        rules: list of rules
        layer_map: layer definitions can be dict or dataclass

    """
    script = f"""<?xml version="1.0" encoding="utf-8"?>
<klayout-macro>
 <description>{name} DRC</description>
 <version/>
 <category>drc</category>
 <prolog/>
 <epilog/>
 <doc/>
 <autorun>false</autorun>
 <autorun-early>false</autorun-early>
 <shortcut>{shortcut}</shortcut>
 <show-in-menu>true</show-in-menu>
 <group-name>drc_scripts</group-name>
 <menu-path>tools_menu.drc.end</menu-path>
 <interpreter>dsl</interpreter>
 <dsl-interpreter-name>drc-dsl-xml</dsl-interpreter-name>
 <text># {name} DRC

# Read about DRC scripts in the User Manual under "Design Rule Check (DRC)"
# Based on SOEN pdk https://github.com/usnistgov/SOEN-PDK/tree/master/tech/OLMAC
# http://klayout.de/doc/manual/drc_basic.html

report("generic DRC")
tiles(100)
tile_borders(2)
threads(3)
"""
    script += write_drc_deck(**kwargs)

    script += """
</text>
</klayout-macro>
"""
    filepath = filepath or get_klayout_path() / "drc" / f"{name}.lydrc"
    filepath = pathlib.Path(filepath)
    filepath.write_text(script)
    logger.info(f"Wrote DRC deck to {filepath}")
    return script
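
A hedged invocation sketch. The rule strings below are raw KLayout DRC-DSL snippets written for illustration; in gdsfactory the rules list is normally produced by rule helper functions whose exact API varies by version:

rules = [
    "width_check = input(1, 0).width(0.2)",  # hypothetical rule string
    "width_check.output('WG width < 0.2 um')",
]
script = write_drc_deck_macro(name="demo", rules=rules, layer_map={})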
Example #5
File: build.py  Project: tvt173/gdsfactory
def build_cache_pull():
    """Pull devices from the cache"""
    if CONFIG.get("cache_url"):
        logger.info("Loading devices from cache...")
        check_call([
            "rsync",
            "-rv",
            "--delete",
            CONFIG["cache_url"],
            CONFIG["build_directory"] + "/",
        ])
Example #6
File: build.py  Project: tvt173/gdsfactory
def build_cache_push():
    """Push devices to the cache"""
    if not os.listdir(CONFIG["build_directory"]):
        logger.info("Nothing to push")
        return

    if CONFIG.get("cache_url"):
        logger.info("Uploading devices to cache...")
        check_call([
            "rsync",
            "-rv",
            CONFIG["build_directory"] + "/",
            CONFIG["cache_url"],
            "--delete",
        ])
Example #7
def merge_json(
    doe_directory: Path = CONFIG["doe_directory"],
    gds_directory: Path = CONFIG["gds_directory"],
    extra_directories: Optional[Iterable[Path]] = None,
    jsonpath: Path = CONFIG["mask_directory"] / "metadata.json",
    json_version: int = 6,
    config: DictConfig = TECH,
) -> Dict[str, Any]:
    """Combine several JSON files from config.yml
    in the root of the mask directory, gets mask_name from there

    Args:
        doe_directory: defaults to current working directory
        extra_directories: list of extra_directories
        jsonpath
        json_version:
        config

    """
    logger.debug("Merging JSON files:")
    cells = {}
    extra_directories = extra_directories or []
    config = dataclasses.asdict(config)
    config.pop("library", "")

    for directory in extra_directories + [doe_directory]:
        for filename in directory.glob("*/*.json"):
            logger.debug(filename)
            with open(filename, "r") as f:
                data = json.load(f)
                cells.update(data.get("cells", {}))

    does = {
        d.stem: json.loads(d.read_text())
        for d in doe_directory.glob("*.json")
    }
    metadata = dict(
        json_version=json_version,
        cells=cells,
        does=does,
        config=config,
    )

    write_config(metadata, jsonpath)
    logger.info(f"Wrote  metadata in {jsonpath}")
    return metadata
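
With the project's CONFIG defaults in place, a call can be as simple as the sketch below (hedged: the paths come from CONFIG, so this only works inside a mask project):

metadata = merge_json()  # scans CONFIG["doe_directory"] for */*.json files
print(len(metadata["cells"]), "cells,", len(metadata["does"]), "does")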
Example #8
def run_simulation(sim: td.Simulation,
                   dirpath=PATH.results_tidy3d) -> Awaitable[td.Simulation]:
    """Returns a simulation with simulation results

    Only submits simulation if results not found locally or remotely.

    First tries to load simulation results from disk.
    Then it tries to load them from the server storage.
    Finally, only submits simulation if not found


    .. code::
        import gdsfactory.simulation.tidy3d as gm

        component = gf.components.straight(length=3)
        sim = gm.get_simulation(component=component)
        sim = run_simulation(sim).result()

    """
    td.logging_level("error")
    sim_hash = get_sim_hash(sim)
    sim_path = dirpath / f"{sim_hash}.hdf5"
    logger.info(f"running simulation {sim_hash}")

    hash_to_id = {
        d["task_name"][:32]: d["task_id"]
        for d in web.get_last_projects()
    }
    target = PATH.results_tidy3d / f"{sim_hash}.hdf5"

    # Try from local storage
    if sim_path.exists():
        logger.info(f"{sim_path} found in local storage")
        sim = _executor.submit(load_results, sim, target)

    # Try from server storage
    elif sim_hash in hash_to_id:
        task_id = hash_to_id[sim_hash]
        sim = _executor.submit(load_results, sim, target, task_id)

    # Only submit if simulation not found
    else:
        task_id = _export_simulation(sim=sim, task_name=sim_hash)
        sim = _executor.submit(load_results, sim, target, task_id)
    return sim
Example #9
def write_cells(
    gdspath: Optional[PathType] = None,
    cell: Optional[gdspy.Cell] = None,
    dirpath: Optional[PathType] = None,
    unit: float = 1e-6,
    precision: float = 1e-9,
    timestamp: Optional[datetime.datetime] = _timestamp2019,
    recursively: bool = True,
) -> None:
    """Writes cells into separate GDS files.

    Args:
        gdspath: GDS file. You need to define either gdspath or cell.
        cell: gdspy cell. You need to define either gdspath or cell.
        unit: unit size for objects in library. 1um by default.
        precision: for object dimensions in the library (m). 1nm by default.
        timestamp: Defaults to 2019-10-25. If None uses current time.
        dirpath: directory for the GDS file. Defaults to current working directory.
        recursively: writes all cells recursively. If False writes only top cells.
    """
    if cell is None and gdspath is None:
        raise ValueError("You need to specify cell or gdspath")

    if gdspath:
        gdsii_lib = gdspy.GdsLibrary()
        gdsii_lib.read_gds(gdspath)
        top_level_cells = gdsii_lib.top_level()
    else:
        top_level_cells = [cell]

    dirpath = dirpath or pathlib.Path.cwd()
    dirpath = pathlib.Path(dirpath)
    dirpath.mkdir(exist_ok=True, parents=True)

    for cell in top_level_cells:
        gdspath = f"{pathlib.Path(dirpath)/cell.name}.gds"
        lib = gdspy.GdsLibrary(unit=unit, precision=precision)
        lib.write_gds(gdspath, cells=[cell], timestamp=timestamp)
        logger.info(f"Write GDS to {gdspath}")

        if recursively:
            write_cells_recursively(
                cell=cell,
                unit=unit,
                precision=precision,
                timestamp=timestamp,
                dirpath=dirpath,
            )
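
A hedged usage sketch; "mask.gds" and the output directory are hypothetical:

write_cells(
    gdspath="mask.gds",   # hypothetical input GDS
    dirpath="gds_cells",  # one .gds file per cell is written here
    recursively=True,
)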
Example #10
def run_python(filename):
    """Run a python script and keep track of some context"""
    logger.debug("Running `{}`.".format(filename))
    command = ["python", filename]

    # Run the process
    t = time.time()
    process = Popen(command, stdout=PIPE, stderr=PIPE)
    stdout, _ = process.communicate()
    total_time = time.time() - t
    if process.returncode == 0:
        logger.info("v {} ({:.1f}s)".format(os.path.relpath(filename), total_time))
    else:
        logger.info(
            "! Error in {} {:.1f}s)".format(os.path.relpath(filename), total_time)
        )
        # message = "! Error in `{}`".format(basename(filename))
        # logger.error(message, exc_info=(Exception, stderr.strip(), None))
    if len(stdout.decode().strip()) > 0:
        logger.debug("Output of python {}:\n{}".format(filename, stdout.strip()))
    return filename, process.returncode
Example #11
File: build.py  Project: tvt173/gdsfactory
def build_devices(regex=".*", overwrite=True):
    """Builds all the python files in devices/"""
    # Avoid accidentally rebuilding devices
    if (os.path.isdir(CONFIG["gds_directory"])
            and os.listdir(CONFIG["gds_directory"]) and not overwrite):
        print("Run `make clean` to remove already built devices.")
        sys.exit(0)

    # Collect all the files to run.
    all_files = [
        os.path.join(dp, f)
        for dp, dn, filenames in os.walk(CONFIG["devices_directory"])
        for f in filenames if os.path.splitext(f)[1] == ".py"
    ]
    all_files = sorted(all_files)
    all_files = [f for f in all_files if re.search(regex, f)]

    # Notify user
    logger.info("Building splits on {} threads. {} files to run.".format(
        multiprocessing.cpu_count(), len(all_files)))
    logger.info("Debug information at {}".format(
        os.path.relpath(os.path.join(CONFIG["log_directory"], "debug.log"))))

    # Now run all the files in batches of $CPU_SIZE.
    with Pool(processes=multiprocessing.cpu_count()) as pool:
        for filename, rc in pool.imap_unordered(run_python, all_files):
            logger.debug("Finished {} {}".format(filename, rc))

    # Report on what we did.
    devices = glob(os.path.join(CONFIG["gds_directory"], "*.gds"))
    countmsg = "There are now {} GDS files in {}.".format(
        len(devices), os.path.relpath(CONFIG["gds_directory"]))
    logger.info(f"Finished building devices. {countmsg}")
Example #12
def merge_markdown(
    reports_directory: Path = CONFIG["doe_directory"],
    mdpath: Path = CONFIG["mask_directory"] / "report.md",
    **kwargs,
) -> None:
    """Merges all individual markdown reports (.md) into a single markdown
    you can add a report:[Capacitors, Diodes...] in config.yml to define the merge order
    """
    logger.info("Merging Markdown files:")
    configpath = mdpath.with_suffix(".yml")
    tech = dataclasses.asdict(TECH)
    tech.pop("library", "")

    with open(configpath, "w") as f:
        tech.update(**kwargs)
        tech_omegaconf = OmegaConf.create(tech)
        f.write(OmegaConf.to_yaml(tech_omegaconf))

    with open(mdpath, "w") as f:

        def wl(line="", eol="\n"):
            f.write(line + eol)

        reports = sorted(glob(os.path.join(reports_directory, "*.md")))
        for filename in reports:
            with open(filename) as infile:
                for line in infile:
                    f.write(line)

    logger.info(f"Wrote {mdpath}")
    logger.info(f"Wrote {configpath}")
Example #13
def test_autoplacer():
    from gdsfactory.autoplacer.yaml_placer import place_from_yaml

    shutil.rmtree(build_path, ignore_errors=True)
    mask_path.mkdir(parents=True, exist_ok=True)

    # Map the component library names in the YAML file to the component library
    name2factory = {"spiral": spiral}

    logger.add(sink=logpath)
    logger.info("writring does to", doe_root_path)
    write_sweeps(
        str(config_yml),
        component_factory=name2factory,
        doe_root_path=doe_root_path,
        doe_metadata_path=doe_metadata_path,
    )
    top_level = place_from_yaml(config_yml, doe_root_path)
    top_level.write(str(gdspath))

    assert gdspath.exists()
    return gdspath
Example #14
def _export_simulation(
    sim: td.Simulation,
    task_name: Optional[str] = None,
    folder_name: str = "default",
    draft: bool = False,
) -> int:
    """Exports simulation to web and returns task_id.

    Args:
        sim: simulation object
        task_name: optional name for the task.
        folder_name: Server folder to hold the task.
        draft: If ``True``, the project will be submitted but not run.
            It can then be visualized in the web UI and run from there when needed.

    """
    project = web.new_project(sim.export(),
                              task_name=task_name,
                              folder_name=folder_name,
                              draft=draft)
    task_id = project["taskId"]
    logger.info(f"submitting {task_id}")
    return task_id
Example #15
File: show.py  Project: simbilod/gdsfactory
def show(component: Union[Component, str, pathlib.Path],
         clear_cache: bool = True,
         **kwargs) -> None:
    """Write GDS and show Component in klayout

    Args:
        component: Component object, GDS file path (str), or pathlib.Path
        clear_cache: clears the component cache after showing the component

    Keyword Args:
        gdspath: GDS file path to write to.
        gdsdir: directory for the GDS file. Defaults to /tmp/
        unit: unit size for objects in library. 1um by default.
        precision: for object dimensions in the library (m). 1nm by default.
        timestamp: Defaults to 2019-10-25. If None uses current time.

    """
    if isinstance(component, pathlib.Path):
        component = str(component)
        return klive.show(component)
    elif isinstance(component, str):
        return klive.show(component)
    elif component is None:
        raise ValueError(
            "Component is None, make sure that your function returns the component"
        )

    elif isinstance(component, Component):
        gdspath = component.write_gds(logging=False, **kwargs)
        klive.show(gdspath)
        logger.info(f"Klayout show {component!r}")
    else:
        raise ValueError(
            f"Component is {type(component)}, make sure you pass a Component or a path"
        )
    if clear_cache:
        clear_cache_function()
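
A hedged usage sketch (requires KLayout running with the klive server listening):

import gdsfactory as gf

c = gf.components.straight(length=10)
show(c)           # writes the GDS and streams it to KLayout
show("demo.gds")  # a str or pathlib.Path is forwarded directly; hypothetical file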
Example #16
def write_sparameters_meep_mpi(
    component: Component,
    cores: int = ncores,
    filepath: Optional[Path] = None,
    dirpath: Path = sparameters_path,
    layer_stack: LayerStack = LAYER_STACK,
    temp_dir: Path = temp_dir_default,
    temp_file_str: str = "write_sparameters_meep_mpi",
    overwrite: bool = False,
    wait_to_finish: bool = True,
    **kwargs,
) -> Path:
    """Write Sparameters using multiple cores and MPI
    and returns Sparameters CSV filepath.

    Simulates each time using a different input port (by default, all of them)
    unless you specify port_symmetries:

    port_symmetries = {"o1":
            {
                "s11": ["s22","s33","s44"],
                "s21": ["s21","s34","s43"],
                "s31": ["s13","s24","s42"],
                "s41": ["s14","s23","s32"],
            }
        }

    Args:
        component: gdsfactory Component.
        cores: number of processors.
        filepath: to store pandas Dataframe with Sparameters in CSV format.
            Defaults to dirpath/component_.csv
        dirpath: directory to store Sparameters
        layer_stack: LayerStack class
        temp_dir: temporary directory to hold simulation files.
        temp_file_str: names of temporary files in temp_dir.
        overwrite: overwrites stored simulation results.
        wait_to_finish: if True, blocks until the Sparameters file exists.

    Keyword Args:
        resolution: in pixels/um (20: for coarse, 120: for fine)
        port_symmetries: Dict to specify port symmetries, to save number of simulations
        source_ports: list of port string names to use as sources
        dirpath: directory to store Sparameters
        layer_stack: LayerStack class
        port_margin: margin on each side of the port
        port_monitor_offset: offset between monitor GDS port and monitor MEEP port
        port_source_offset: offset between source GDS port and source MEEP port
        filepath: to store pandas Dataframe with Sparameters in CSV format.
        animate: saves an MP4 video of the simulation for inspection, and also
            outputs during computation. The name of the file is the source index
        lazy_parallelism: toggles the flag "meep.divide_parallel_processes" to
            perform the simulations with different sources in parallel
        dispersive: use dispersive models for materials (requires higher resolution)
        extend_ports_length: to extend ports beyond the PML
        layer_stack: Dict of layer number (int, int) to thickness (um)
        zmargin_top: thickness for cladding above core
        zmargin_bot: thickness for cladding below core
        tpml: PML thickness (um)
        clad_material: material for cladding
        is_3d: if True runs in 3D
        wl_min: wavelength min (um)
        wl_max: wavelength max (um)
        wl_steps: wavelength steps
        dfcen: delta frequency
        port_source_name: input port name
        port_field_monitor_name:
        port_margin: margin on each side of the port
        distance_source_to_monitors: in um; the source is placed this distance before the monitors
        port_source_offset: offset between source GDS port and source MEEP port
        port_monitor_offset: offset between monitor GDS port and monitor MEEP port

    Returns:
        filepath for sparameters CSV (wavelengths, s11a, s12m, ...)
            where `a` is the angle in radians and `m` the module
    """
    settings = remove_simulation_kwargs(kwargs)
    filepath = filepath or get_sparameters_path(
        component=component,
        dirpath=dirpath,
        layer_stack=layer_stack,
        **settings,
    )
    filepath = pathlib.Path(filepath)
    if filepath.exists() and not overwrite:
        logger.info(f"Simulation {filepath!r} already exists")
        return filepath

    # Save the component object to simulation for later retrieval
    temp_dir.mkdir(exist_ok=True, parents=True)
    tempfile = temp_dir / temp_file_str
    component_file = tempfile.with_suffix(".pkl")
    kwargs.update(filepath=str(filepath))

    with open(component_file, "wb") as outp:
        pickle.dump(component, outp, pickle.HIGHEST_PROTOCOL)

    # Write execution file
    script_lines = [
        "import pickle\n",
        "from gdsfactory.simulation.gmeep import write_sparameters_meep\n\n",
        'if __name__ == "__main__":\n\n',
        f"\twith open(\"{component_file}\", 'rb') as inp:\n",
        "\t\tcomponent = pickle.load(inp)\n\n"
        "\twrite_sparameters_meep(component = component,\n",
    ]
    for key in kwargs.keys():
        script_lines.append(f"\t\t{key} = {kwargs[key]!r},\n")

    script_lines.append("\t)")
    script_file = tempfile.with_suffix(".py")
    with open(script_file, "w") as script_file_obj:
        script_file_obj.writelines(script_lines)

    command = f"mpirun -np {cores} python {script_file}"
    logger.info(command)
    logger.info(str(filepath))

    subprocess.Popen(
        shlex.split(command),
        shell=False,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    if wait_to_finish:
        while not filepath.exists():
            time.sleep(1)

    return filepath
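
A hedged usage sketch, assuming an MPI-enabled MEEP install; is_3d is one of the keyword arguments forwarded to write_sparameters_meep:

import gdsfactory as gf

c = gf.components.straight(length=2)
filepath = write_sparameters_meep_mpi(
    component=c,
    cores=2,
    is_3d=False,  # 2D run to keep the example cheap
)
print(filepath)   # CSV with wavelengths, s11a, s11m, ...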
Example #17
def write_sparameters_meep(
    component: Component,
    port_symmetries: Optional[PortSymmetries] = None,
    resolution: int = 20,
    wl_min: float = 1.5,
    wl_max: float = 1.6,
    wl_steps: int = 50,
    dirpath: Path = sparameters_path,
    layer_stack: LayerStack = LAYER_STACK,
    port_margin: float = 2,
    port_monitor_offset: float = -0.1,
    port_source_offset: float = -0.1,
    filepath: Optional[Path] = None,
    overwrite: bool = False,
    animate: bool = False,
    lazy_parallelism: bool = False,
    run: bool = True,
    dispersive: bool = False,
    xmargin: float = 0,
    ymargin: float = 0,
    xmargin_left: float = 0,
    xmargin_right: float = 0,
    ymargin_top: float = 0,
    ymargin_bot: float = 0,
    **settings,
) -> pd.DataFrame:
    r"""Compute Sparameters and writes them to a CSV filepath.
    Simulates each time using a different input port (by default, all of them)
    unless you specify port_symmetries:

    port_symmetries = {"o1":
            {
                "s11": ["s22","s33","s44"],
                "s21": ["s21","s34","s43"],
                "s31": ["s13","s24","s42"],
                "s41": ["s14","s23","s32"],
            }
        }
    - Only simulations using the outer key port names will be run
    - The associated value is another dict whose keys are the S-parameters computed
        when this source is active
    - The values of this inner Dict are lists of s-parameters whose values are copied

    This allows you to run fewer simulations

    TODO: automate this for common component types
    (geometrical symmetries, reciprocal materials, etc.)

    TODO: enable other port naming conventions, such as (in0, in1, out0, out1)


    .. code::

         top view
              ________________________________
             |                               |
             | xmargin_left                  | port_extension
             |<------>          port_margin ||<-->
          ___|___________          _________||___
             |           \        /          |
             |            \      /           |
             |             ======            |
             |            /      \           |
          ___|___________/        \__________|___
             |   |                 <-------->|
             |   |ymargin_bot   xmargin_right|
             |   |                           |
             |___|___________________________|

        side view
              ________________________________
             |                     |         |
             |                     |         |
             |                   zmargin_top |
             |ymargin              |         |
             |<---> _____         _|___      |
             |     |     |       |     |     |
             |     |     |       |     |     |
             |     |_____|       |_____|     |
             |       |                       |
             |       |                       |
             |       |zmargin_bot            |
             |       |                       |
             |_______|_______________________|



    Args:
        component: to simulate.
        resolution: in pixels/um (20: for coarse, 120: for fine)
        port_symmetries: Dict to specify port symmetries, to save number of simulations
        source_ports: list of port string names to use as sources
        dirpath: directory to store Sparameters
        layer_stack: LayerStack class
        port_margin: margin on each side of the port
        port_monitor_offset: offset between monitor Component port and monitor MEEP port
        port_source_offset: offset between source Component port and source MEEP port
        filepath: to store pandas Dataframe with Sparameters in CSV format.
            Defaults to dirpath/component_.csv
        overwrite: overwrites stored simulation results.
        animate: saves an MP4 video of the simulation for inspection, and also
            outputs during computation. The name of the file is the source index
        lazy_parallelism: toggles the flag "meep.divide_parallel_processes" to
            perform the simulations with different sources in parallel
        run: runs simulation, if False, only plots simulation
        dispersive: use dispersive models for materials (requires higher resolution)
        xmargin: left and right distance from component to PML.
        xmargin_left: west distance from component to PML.
        xmargin_right: east distance from component to PML.
        ymargin: top and bottom distance from component to PML.
        ymargin_top: north distance from component to PML.
        ymargin_bot: south distance from component to PML.

    keyword Args:
        extend_ports_length: to extend ports beyond the PML (um).
        zmargin_top: thickness for cladding above core (um).
        zmargin_bot: thickness for cladding below core (um)
        tpml: PML thickness (um).
        clad_material: material for cladding.
        is_3d: if True runs in 3D
        wl_min: wavelength min (um).
        wl_max: wavelength max (um).
        wl_steps: wavelength steps
        dfcen: delta frequency
        port_source_name: input port name
        port_field_monitor_name:
        port_margin: margin on each side of the port (um).
        distance_source_to_monitors: in (um).
        port_source_offset: offset between source Component port and source MEEP port
        port_monitor_offset: offset between monitor Component port and monitor MEEP port

    Returns:
        sparameters in a pandas Dataframe (wavelengths, s11a, s12m, ...)
            where `a` is the angle in radians and `m` the module

    """

    port_symmetries = port_symmetries or {}

    xmargin_left = xmargin_left or xmargin
    xmargin_right = xmargin_right or xmargin

    ymargin_top = ymargin_top or ymargin
    ymargin_bot = ymargin_bot or ymargin

    sim_settings = dict(
        resolution=resolution,
        port_symmetries=port_symmetries,
        wl_min=wl_min,
        wl_max=wl_max,
        wl_steps=wl_steps,
        port_margin=port_margin,
        port_monitor_offset=port_monitor_offset,
        port_source_offset=port_source_offset,
        dispersive=dispersive,
        ymargin_top=ymargin_top,
        ymargin_bot=ymargin_bot,
        xmargin_left=xmargin_left,
        xmargin_right=xmargin_right,
        **settings,
    )

    filepath = filepath or get_sparameters_path(
        component=component,
        dirpath=dirpath,
        layer_stack=layer_stack,
        **sim_settings,
    )

    sim_settings = sim_settings.copy()
    sim_settings["layer_stack"] = layer_stack.to_dict()
    sim_settings["component"] = component.to_dict()
    filepath = pathlib.Path(filepath)
    filepath_sim_settings = filepath.with_suffix(".yml")

    # filepath_sim_settings.write_text(OmegaConf.to_yaml(sim_settings))
    # logger.info(f"Write simulation settings to {filepath_sim_settings!r}")
    # return filepath_sim_settings

    component = gf.add_padding_container(
        component,
        default=0,
        top=ymargin_top,
        bottom=ymargin_bot,
        left=xmargin_left,
        right=xmargin_right,
    )

    if not run:
        sim_dict = get_simulation(
            component=component,
            wl_min=wl_min,
            wl_max=wl_max,
            wl_steps=wl_steps,
            layer_stack=layer_stack,
            port_margin=port_margin,
            port_monitor_offset=port_monitor_offset,
            port_source_offset=port_source_offset,
            **settings,
        )
        sim_dict["sim"].plot2D(plot_eps_flag=True)
        plt.show()
        return

    if filepath.exists() and not overwrite:
        logger.info(f"Simulation loaded from {filepath!r}")
        return pd.read_csv(filepath)

    # Parse ports (default)
    monitor_indices = []
    source_indices = []
    component_ref = component.ref()
    for port_name in component_ref.ports.keys():
        if component_ref.ports[port_name].port_type == "optical":
            monitor_indices.append(re.findall("[0-9]+", port_name)[0])
    if port_symmetries:  # user-specified
        for port_name in port_symmetries.keys():
            source_indices.append(re.findall("[0-9]+", port_name)[0])
    else:  # otherwise cycle through all
        source_indices = monitor_indices

    # Create S-parameter storage object
    sp = {}

    @pydantic.validate_arguments
    def sparameter_calculation(
        n,
        component: Component,
        port_symmetries: Optional[PortSymmetries] = port_symmetries,
        monitor_indices: Tuple = monitor_indices,
        wl_min: float = wl_min,
        wl_max: float = wl_max,
        wl_steps: int = wl_steps,
        dirpath: Path = dirpath,
        animate: bool = animate,
        dispersive: bool = dispersive,
        **settings,
    ) -> Dict:

        sim_dict = get_simulation(
            component=component,
            port_source_name=f"o{monitor_indices[n]}",
            resolution=resolution,
            wl_min=wl_min,
            wl_max=wl_max,
            wl_steps=wl_steps,
            port_margin=port_margin,
            port_monitor_offset=port_monitor_offset,
            port_source_offset=port_source_offset,
            dispersive=dispersive,
            **settings,
        )

        sim = sim_dict["sim"]
        monitors = sim_dict["monitors"]
        # freqs = sim_dict["freqs"]
        # wavelengths = 1 / freqs
        # print(sim.resolution)

        # Make termination when field decayed enough across ALL monitors
        termination = []
        for monitor_name in monitors:
            termination.append(
                mp.stop_when_fields_decayed(
                    dt=50,
                    c=mp.Ez,
                    pt=monitors[monitor_name].regions[0].center,
                    decay_by=1e-9,
                ))

        if animate:
            sim.use_output_directory()
            animate = mp.Animate2D(
                sim,
                fields=mp.Ez,
                realtime=True,
                field_parameters={
                    "alpha": 0.8,
                    "cmap": "RdBu",
                    "interpolation": "none",
                },
                eps_parameters={"contour": True},
                normalize=True,
            )
            sim.run(mp.at_every(1, animate), until_after_sources=termination)
            animate.to_mp4(30, monitor_indices[n] + ".mp4")
        else:
            sim.run(until_after_sources=termination)
        # The termination condition is checked every 50 time steps:
        # it measures the Ez component at each monitor point and stops once the
        # field has decayed below the 1e-9 threshold

        # Calculate mode overlaps
        # Get source monitor results
        component_ref = component.ref()
        source_entering, source_exiting = parse_port_eigenmode_coeff(
            monitor_indices[n], component_ref.ports, sim_dict)
        # Get coefficients
        for monitor_index in monitor_indices:
            j = monitor_indices[n]
            i = monitor_index
            if monitor_index == monitor_indices[n]:
                sii = source_exiting / source_entering
                siia = np.unwrap(np.angle(sii))
                siim = np.abs(sii)
                sp[f"s{i}{i}a"] = siia
                sp[f"s{i}{i}m"] = siim
            else:
                monitor_entering, monitor_exiting = parse_port_eigenmode_coeff(
                    monitor_index, component_ref.ports, sim_dict)
                sij = monitor_exiting / source_entering
                sija = np.unwrap(np.angle(sij))
                sijm = np.abs(sij)
                sp[f"s{i}{j}a"] = sija
                sp[f"s{i}{j}m"] = sijm
                sij = monitor_entering / source_entering
                sija = np.unwrap(np.angle(sij))
                sijm = np.abs(sij)

        if port_symmetries:
            for key in port_symmetries[f"o{monitor_indices[n]}"].keys():
                values = port_symmetries[f"o{monitor_indices[n]}"][key]
                for value in values:
                    sp[f"{value}m"] = sp[f"{key}m"]
                    sp[f"{value}a"] = sp[f"{key}a"]

        return sp

    # Since the source is defined upon sim object instantiation, loop here
    # for port_index in monitor_indices:

    num_sims = len(port_symmetries.keys()) or len(source_indices)
    if lazy_parallelism:
        from mpi4py import MPI

        cores = min([num_sims, multiprocessing.cpu_count()])
        n = mp.divide_parallel_processes(cores)
        comm = MPI.COMM_WORLD
        size = comm.Get_size()
        rank = comm.Get_rank()

        sp = sparameter_calculation(
            n,
            component=component,
            port_symmetries=port_symmetries,
            wl_min=wl_min,
            wl_max=wl_max,
            wl_steps=wl_steps,
            animate=animate,
            monitor_indices=monitor_indices,
            **settings,
        )
        # Synchronize dicts
        if rank == 0:
            for i in range(1, size, 1):
                data = comm.recv(source=i, tag=11)
                sp.update(data)

            df = pd.DataFrame(sp)
            df["wavelengths"] = np.linspace(wl_min, wl_max, wl_steps)
            df["freqs"] = 1 / df["wavelengths"]
            df.to_csv(filepath, index=False)
            logger.info(f"Write simulation results to {filepath!r}")
            filepath_sim_settings.write_text(OmegaConf.to_yaml(sim_settings))
            logger.info(
                f"Write simulation settings to {filepath_sim_settings!r}")
            return df
        else:
            comm.send(sp, dest=0, tag=11)

    else:
        for n in tqdm(range(num_sims)):
            sp.update(
                sparameter_calculation(
                    n,
                    component=component,
                    port_symmetries=port_symmetries,
                    wl_min=wl_min,
                    wl_max=wl_max,
                    wl_steps=wl_steps,
                    animate=animate,
                    monitor_indices=monitor_indices,
                    **settings,
                ))
        df = pd.DataFrame(sp)
        df["wavelengths"] = np.linspace(wl_min, wl_max, wl_steps)
        df["freqs"] = 1 / df["wavelengths"]
        df.to_csv(filepath, index=False)

        logger.info(f"Write simulation results to {filepath!r}")
        filepath_sim_settings.write_text(OmegaConf.to_yaml(sim_settings))
        logger.info(f"Write simulation settings to {filepath_sim_settings!r}")
        return df
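
A hedged usage sketch; run=False only plots the simulation layout, which is a cheap way to sanity-check the setup before a full run:

import gdsfactory as gf

c = gf.components.straight(length=2)
write_sparameters_meep(component=c, run=False, ymargin=3)      # plot only
df = write_sparameters_meep(component=c, run=True, ymargin=3)  # full run
print(df["wavelengths"].min(), df["wavelengths"].max())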
Example #18
def write_sparameters_lumerical(
    component: ComponentOrFactory,
    session: Optional[object] = None,
    run: bool = True,
    overwrite: bool = False,
    dirpath: Path = gf.CONFIG["sparameters"],
    layer_stack: LayerStack = LAYER_STACK,
    simulation_settings: SimulationSettings = SIMULATION_SETTINGS,
    **settings,
) -> pd.DataFrame:
    """Returns and writes component Sparameters using Lumerical FDTD.

    If simulation exists it returns the Sparameters directly unless overwrite=True
    which forces a re-run of the simulation

    Lumerical units are in meters while gdsfactory units are in um

    Writes Sparameters both in .CSV and .DAT (interconnect format) as well as
    simulation settings in YAML

    In the CSV format you can see `S12m` where `m` stands for magnitude
    and `S12a` where `a` stands for angle in radians

    Your components need to have ports, that will extend over the PML.

    .. image:: https://i.imgur.com/dHAzZRw.png

    For your Fab technology you can override

    - Simulation Settings
    - dirpath
    - layerStack

    Args:
        component: Component to simulate
        session: you can pass a session=lumapi.FDTD() or it will create one
        run: True runs Lumerical, False only draws simulation
        overwrite: run even if simulation results already exists
        dirpath: where to store the Sparameters
        layer_stack: layer_stack
        simulation_settings: dataclass with all simulation_settings
        **settings: overwrite any simulation settings
            background_material: for the background
            port_margin: on both sides of the port width (um)
            port_height: port height (um)
            port_extension: port extension (um)
            mesh_accuracy: 2 (1: coarse, 2: fine, 3: superfine)
            zmargin: for the FDTD region 1 (um)
            ymargin: for the FDTD region 2 (um)
            xmargin: for the FDTD region
            pml_margin: for all the FDTD region
            wavelength_start: 1.2 (um)
            wavelength_stop: 1.6 (um)
            wavelength_points: 500
            simulation_time: determines the max structure size (3e8/2.4*10e-12*1e6) = 1.25mm
            simulation_temperature: in kelvin 300

    Returns:
        Sparameters pandas DataFrame (wavelength_nm, S11m, S11a, S12a ...)
        suffix `a` for angle in radians and `m` for module

    """
    component = component() if callable(component) else component
    sim_settings = dataclasses.asdict(simulation_settings)

    layer_to_thickness = layer_stack.get_layer_to_thickness()
    layer_to_zmin = layer_stack.get_layer_to_zmin()
    layer_to_material = layer_stack.get_layer_to_material()

    if hasattr(component.info, "simulation_settings"):
        sim_settings.update(component.info.simulation_settings)
        logger.info(
            "Updating {component.name} sim settings {component.simulation_settings}"
        )
    for setting in settings.keys():
        if setting not in sim_settings:
            raise ValueError(
                f"`{setting}` is not a valid setting ({list(sim_settings.keys()) + simulation_settings})"
            )

    sim_settings.update(**settings)
    ss = SimulationSettings(**sim_settings)

    component_extended = gf.c.extend_ports(
        component, length=ss.distance_source_to_monitors)

    ports = component_extended.get_ports_list(port_type="optical")
    if not ports:
        raise ValueError(f"`{component.name}` does not have any optical ports")

    c = gf.components.extension.extend_ports(component=component,
                                             length=ss.port_extension)
    c.remove_layers(component.layers - set(layer_to_thickness.keys()))
    c._bb_valid = False
    c.flatten()
    c.name = "top"
    gdspath = c.write_gds()

    filepath = get_sparameters_path(
        component=component,
        dirpath=dirpath,
        layer_to_material=layer_to_material,
        layer_to_thickness=layer_to_thickness,
        **settings,
    )
    filepath_csv = filepath.with_suffix(".csv")
    filepath_sim_settings = filepath.with_suffix(".yml")
    filepath_fsp = filepath.with_suffix(".fsp")

    if run and filepath_csv.exists() and not overwrite:
        logger.info(f"Reading Sparameters from {filepath_csv}")
        return pd.read_csv(filepath_csv)

    if not run and session is None:
        print(run_false_warning)

    logger.info(f"Writing Sparameters to {filepath_csv}")
    x_min = (component.xmin - ss.xmargin - ss.pml_margin) * 1e-6
    x_max = (component.xmax + ss.xmargin + ss.pml_margin) * 1e-6
    y_min = (component.ymin - ss.ymargin - ss.pml_margin) * 1e-6
    y_max = (component.ymax + ss.ymargin + ss.pml_margin) * 1e-6

    port_orientations = [p.orientation for p in ports]

    # bend
    if 90 in port_orientations:
        y_max -= ss.ymargin * 1e-6

    if 270 in port_orientations:
        y_min += ss.ymargin * 1e-6

    layers_thickness = [
        layer_to_thickness[layer] for layer in component.get_layers()
        if layer in layer_to_thickness
    ]
    if not layers_thickness:
        raise ValueError(f"no layers for component {component.get_layers()}"
                         f"in layer stack {layers_thickness.keys()}")
    layers_zmin = [
        layer_to_zmin[layer] for layer in component.get_layers()
        if layer in layer_to_zmin
    ]
    component_thickness = max(layers_thickness)
    component_zmin = min(layers_zmin)

    z = (component_zmin + component_thickness) / 2 * 1e-6
    z_span = (2 * ss.zmargin + component_thickness) * 1e-6

    x_span = x_max - x_min
    y_span = y_max - y_min

    layers = c.get_layers()
    sim_settings.update(dict(layer_stack=layer_stack.to_dict()))

    sim_settings = dict(
        simulation_settings=sim_settings,
        component=component.settings,
        version=__version__,
    )

    logger.info(
        f"Simulation size = {(x_span)*1e6:.3f}, {(y_span)*1e6:.3f}, {z_span*1e6:.3f} um"
    )

    # from pprint import pprint
    # filepath_sim_settings.write_text(omegaconf.OmegaConf.to_yaml(sim_settings))
    # print(filepath_sim_settings)
    # pprint(sim_settings)
    # return

    try:
        import lumapi
    except ModuleNotFoundError as e:
        print(
            "Cannot import lumapi (Python Lumerical API). "
            "You can add set the PYTHONPATH variable or add it with `sys.path.append()`"
        )
        raise e
    except OSError as e:
        raise e

    start = time.time()
    s = session or lumapi.FDTD(hide=False)
    s.newproject()
    s.selectall()
    s.deleteall()
    s.addrect(
        x_min=x_min,
        x_max=x_max,
        y_min=y_min,
        y_max=y_max,
        z=z,
        z_span=z_span,
        index=1.5,
        name="clad",
    )

    material = ss.background_material
    if material not in MATERIAL_NAME_TO_LUMERICAL:
        raise ValueError(
            f"{material} not in {list(MATERIAL_NAME_TO_LUMERICAL.keys())}")
    material = MATERIAL_NAME_TO_LUMERICAL[material]
    s.setnamed("clad", "material", material)

    s.addfdtd(
        dimension="3D",
        x_min=x_min,
        x_max=x_max,
        y_min=y_min,
        y_max=y_max,
        z=z,
        z_span=z_span,
        mesh_accuracy=ss.mesh_accuracy,
        use_early_shutoff=True,
        simulation_time=ss.simulation_time,
        simulation_temperature=ss.simulation_temperature,
    )

    for layer, thickness in layer_to_thickness.items():
        if layer not in layers:
            continue

        if layer not in layer_to_material:
            raise ValueError(f"{layer} not in {layer_to_material.keys()}")

        material_name = layer_to_material[layer]
        if material_name not in MATERIAL_NAME_TO_LUMERICAL:
            raise ValueError(
                f"{material_name} not in {list(MATERIAL_NAME_TO_LUMERICAL.keys())}"
            )
        material_name_lumerical = MATERIAL_NAME_TO_LUMERICAL[material_name]

        if layer not in layer_to_zmin:
            raise ValueError(f"{layer} not in {list(layer_to_zmin.keys())}")

        zmin = layer_to_zmin[layer]
        zmax = zmin + thickness
        z = (zmax + zmin) / 2

        s.gdsimport(str(gdspath), "top", f"{layer[0]}:{layer[1]}")
        layername = f"GDS_LAYER_{layer[0]}:{layer[1]}"
        s.setnamed(layername, "z", z * 1e-6)
        s.setnamed(layername, "z span", thickness * 1e-6)
        s.setnamed(layername, "material", material_name_lumerical)
        logger.info(
            f"adding {layer}, thickness = {thickness} um, zmin = {zmin} um ")

    for i, port in enumerate(ports):
        zmin = layer_to_zmin[port.layer]
        thickness = layer_to_thickness[port.layer]
        z = (zmin + thickness) / 2
        zspan = 2 * ss.port_margin + thickness

        s.addport()
        p = f"FDTD::ports::port {i+1}"
        s.setnamed(p, "x", port.x * 1e-6)
        s.setnamed(p, "y", port.y * 1e-6)
        s.setnamed(p, "z", z * 1e-6)
        s.setnamed(p, "z span", zspan * 1e-6)

        deg = int(port.orientation)
        # if port.orientation not in [0, 90, 180, 270]:
        #     raise ValueError(f"{port.orientation} needs to be [0, 90, 180, 270]")

        if -45 <= deg <= 45:
            direction = "Backward"
            injection_axis = "x-axis"
            dxp = 0
            dyp = 2 * ss.port_margin + port.width
        elif 45 < deg < 90 + 45:
            direction = "Backward"
            injection_axis = "y-axis"
            dxp = 2 * ss.port_margin + port.width
            dyp = 0
        elif 90 + 45 < deg < 180 + 45:
            direction = "Forward"
            injection_axis = "x-axis"
            dxp = 0
            dyp = 2 * ss.port_margin + port.width
        elif 180 + 45 < deg < 180 + 45 + 90:
            direction = "Forward"
            injection_axis = "y-axis"
            dxp = 2 * ss.port_margin + port.width
            dyp = 0

        else:
            raise ValueError(
                f"port {port.name} orientation {port.orientation} is not valid"
            )

        s.setnamed(p, "direction", direction)
        s.setnamed(p, "injection axis", injection_axis)
        s.setnamed(p, "y span", dyp * 1e-6)
        s.setnamed(p, "x span", dxp * 1e-6)
        # s.setnamed(p, "theta", deg)
        s.setnamed(p, "name", port.name)
        # s.setnamed(p, "name", f"o{i+1}")

        logger.info(f"port {p} {port.name}: at ({port.x}, {port.y}, 0)"
                    f"size = ({dxp}, {dyp}, {zspan})")

    s.setglobalsource("wavelength start", ss.wavelength_start * 1e-6)
    s.setglobalsource("wavelength stop", ss.wavelength_stop * 1e-6)
    s.setnamed("FDTD::ports", "monitor frequency points", ss.wavelength_points)

    if run:
        s.save(str(filepath_fsp))
        s.deletesweep("s-parameter sweep")

        s.addsweep(3)
        s.setsweep("s-parameter sweep", "Excite all ports", 0)
        s.setsweep("S sweep", "auto symmetry", True)
        s.runsweep("s-parameter sweep")
        sp = s.getsweepresult("s-parameter sweep", "S parameters")
        s.exportsweep("s-parameter sweep", str(filepath))
        logger.info(f"wrote sparameters to {filepath}")

        keys = [key for key in sp.keys() if key.startswith("S")]
        ra = {
            f"{key}a": list(np.unwrap(np.angle(sp[key].flatten())))
            for key in keys
        }
        rm = {f"{key}m": list(np.abs(sp[key].flatten())) for key in keys}
        wavelength_nm = sp["lambda"].flatten() * 1e9

        results = {"wavelength_nm": wavelength_nm}
        results.update(ra)
        results.update(rm)
        df = pd.DataFrame(results, index=wavelength_nm)

        end = time.time()
        df.to_csv(filepath_csv, index=False)
        sim_settings.update(compute_time_seconds=end - start)
        filepath_sim_settings.write_text(
            omegaconf.OmegaConf.to_yaml(sim_settings))
        return df
    filepath_sim_settings.write_text(omegaconf.OmegaConf.to_yaml(sim_settings))
    return s
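
A hedged usage sketch (requires a Lumerical FDTD license and lumapi on the PYTHONPATH); component accepts either a Component or a factory:

import gdsfactory as gf

df = write_sparameters_lumerical(
    component=gf.components.mmi1x2,  # a factory is called internally
    run=True,
    mesh_accuracy=1,  # coarse mesh override, one of the simulation settings
)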
Example #19
def write_sweeps(
    filepath: PathType,
    component_factory: Dict[str, Callable] = factory,
    doe_root_path: PathType = CONFIG["cache_doe_directory"],
    doe_metadata_path: PathType = CONFIG["doe_directory"],
    n_cores: int = n_cores,
    overwrite: bool = False,
    precision: float = 1e-9,
    cache: bool = False,
) -> None:
    """Generates a sweep/DOEs of components specified in a yaml file
    allows for each DOE to have its own x and y spacing (more flexible than method1)
    similar to write_doe

    Args:
        filepath: for the does.yml
        component_factory:
        doe_root_path:
        doe_metadata_path:
        n_cores: number of cores
        overwrite:
        precision: for the GDS, defaults to 1nm
        cache: if True uses cache
    """
    doe_root_path = pathlib.Path(doe_root_path)
    doe_metadata_path = pathlib.Path(doe_metadata_path)

    doe_root_path.mkdir(parents=True, exist_ok=True)
    doe_metadata_path.mkdir(parents=True, exist_ok=True)

    dicts, mask_settings = read_sweep(filepath)
    does, templates_by_type = separate_does_from_templates(dicts)

    dict_templates = templates_by_type.get("template", {})

    with_cache_default = mask_settings.get("cache", cache)

    list_args = []
    for doe_name, doe in does.items():
        doe["name"] = doe_name
        component = doe["component"]

        if component not in component_factory:
            raise ValueError(
                f"{component!r} not in {component_factory.keys()}")

        if "template" in doe:
            # The keyword template is used to enrich the dictionary from the template
            templates = doe["template"]
            if not isinstance(templates, list):
                templates = [templates]
            for template in templates:
                try:
                    doe = update_dicts_recurse(doe, dict_templates[template])
                except Exception:
                    print(template, "does not exist")
                    raise

        do_permutation = doe.pop("do_permutation")
        settings = doe["settings"]
        doe["list_settings"] = get_settings_list(do_permutation, **settings)

        list_args += [doe]

    does_running = []
    start_times = {}
    finish_times = {}
    doe_name_to_process = {}
    while list_args:
        while len(does_running) < n_cores:
            if not list_args:
                break
            doe = list_args.pop()
            doe_name = doe["name"]

            # Only launch a build process if we do not use the cache
            # Or if the DOE is not built

            list_settings = doe["list_settings"]

            with_cache = doe.get("cache", with_cache_default)

            _doe_exists = False

            if "doe_template" in doe:
                # this DOE points to another existing component
                _doe_exists = True
                logger.info(f"Using template - {doe_name!r}")
                save_doe_use_template(doe)

            elif with_cache:
                _doe_exists = doe_exists(
                    doe_name=doe_name,
                    list_settings=list_settings,
                    doe_root_path=doe_root_path,
                )
                if _doe_exists:
                    logger.info("Cached - {doe_name!r}")
                    if overwrite:
                        component_names = load_doe_component_names(doe_name)

                        write_sweep_metadata(
                            doe_name=doe["name"],
                            cell_names=component_names,
                            list_settings=doe["list_settings"],
                            doe_metadata_path=doe_metadata_path,
                        )

            if not _doe_exists:
                start_times[doe_name] = time.time()
                p = Process(
                    target=write_sweep,
                    args=(doe, component_factory),
                    kwargs={
                        "doe_root_path": doe_root_path,
                        "doe_metadata_path": doe_metadata_path,
                        "overwrite": overwrite,
                        "precision": precision,
                    },
                )
                doe_name_to_process[doe_name] = p
                does_running += [doe_name]
                try:
                    p.start()
                except Exception:
                    print(f"Issue starting process for {doe_name}")
                    print(type(component_factory))
                    raise

        to_rm = []
        for i, doe_name in enumerate(does_running):
            _p = doe_name_to_process[doe_name]
            if not _p.is_alive():
                to_rm += [i]
                finish_times[doe_name] = time.time()
                dt = finish_times[doe_name] - start_times[doe_name]
                line = "Done - {} ({:.1f}s)".format(doe_name, dt)
                logger.info(line)

        for i in to_rm[::-1]:
            does_running.pop(i)

        time.sleep(0.001)

    while does_running:
        to_rm = []
        for i, _doe_name in enumerate(does_running):
            _p = doe_name_to_process[_doe_name]
            if not _p.is_alive():
                to_rm += [i]
        for i in to_rm[::-1]:
            does_running.pop(i)

        time.sleep(0.05)
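
A hedged usage sketch; "does.yml" is a hypothetical sweep definition whose component entries must match keys in component_factory:

import gdsfactory as gf

write_sweeps(
    "does.yml",  # hypothetical sweep file
    component_factory={"straight": gf.components.straight},
    n_cores=2,
)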
Example #20
def build_clean():
    """Cleans generated files such as build/."""
    target = CONFIG["build_directory"]
    if os.path.exists(target):
        shutil.rmtree(target)
        logger.info(f"Deleted {target}")
Example #21
def write_sparameters_meep_mpi_pool(
    jobs: List[Dict],
    cores_per_run: int = 2,
    total_cores: int = 4,
    temp_dir: Path = temp_dir_default,
    delete_temp_files: bool = True,
    dirpath: Path = sparameters_path,
    layer_stack: LayerStack = LAYER_STACK,
    **kwargs,
) -> List[Path]:
    """Write Sparameters and returns the filepaths
    Given a list of write_sparameters_meep keyword arguments (the "jobs"),
        launches them in different cores
    Each simulation is assigned "cores_per_run" cores
    A total of "total_cores" is assumed, if cores_per_run * len(jobs) > total_cores
    then the overflow will run sequentially (not in parallel)

    Args
        jobs: list of Dicts containing the simulation settings for each job.
            for write_sparameters_meep
        cores_per_run: number of processors to assign to each component simulation
        total_cores: total number of cores to use
        temp_dir: temporary directory to hold simulation files
        delete_temp_files: deletes temp_dir when done
        dirpath: directory to store Sparameters
        layer_stack:

    keyword Args:
        overwrite: overwrites stored simulation results.
        dispersive: use dispersive models for materials (requires higher resolution)
        extend_ports_length: to extend ports beyond the PML
        zmargin_top: thickness for cladding above core
        zmargin_bot: thickness for cladding below core
        tpml: PML thickness (um)
        clad_material: material for cladding
        is_3d: if True runs in 3D
        wl_min: wavelength min (um)
        wl_max: wavelength max (um)
        wl_steps: wavelength steps
        dfcen: delta frequency
        port_source_name: input port name
        port_field_monitor_name:
        port_margin: margin on each side of the port
        distance_source_to_monitors: in um; the source is placed this distance before the monitors
        port_source_offset: offset between source GDS port and source MEEP port
        port_monitor_offset: offset between monitor GDS port and monitor MEEP port

    Returns:
        filepath list for sparameters CSV (wavelengths, s11a, s12m, ...)
            where `a` is the angle in radians and `m` the module

    """
    # Parse jobs
    jobs_to_run = []
    for job in jobs:
        settings = remove_simulation_kwargs(kwargs)
        filepath = job.get(
            "filepath",
            get_sparameters_path(
                component=job["component"],
                dirpath=dirpath,
                layer_stack=layer_stack,
                **settings,
            ),
        )
        if filepath.exists():
            job.update(**kwargs)
            if job.get("overwrite", False):
                filepath.unlink()
                logger.info(
                    f"Simulation {filepath!r} found and overwrite is True. "
                    "Deleting file and adding it to the queue."
                )
                jobs_to_run.append(job)
            else:
                logger.info(
                    f"Simulation {filepath!r} found exists and "
                    "overwrite is False. Removing it from the queue."
                )
        else:
            logger.info(f"Simulation {filepath!r} not found. Adding it to the queue")
            jobs_to_run.append(job)

    # Update jobs
    jobs = jobs_to_run

    # Setup pools
    num_pools = int(np.ceil(cores_per_run * len(jobs) / total_cores))
    jobs_per_pool = int(np.floor(total_cores / cores_per_run))
    njobs = len(jobs)

    logger.info(f"Running parallel simulations over {njobs} jobs")
    logger.info(
        f"Using a total of {total_cores} cores with {cores_per_run} cores per job"
    )
    logger.info(
        f"Tasks split amongst {num_pools} pools with up to {jobs_per_pool} jobs each."
    )

    i = 0
    all_filepaths = []  # collect filepaths across all pools
    # For each pool
    for j in range(num_pools):
        filepaths = []  # filepaths for the current pool only

        # For each job in the pool
        for k in range(jobs_per_pool):
            # Flag to catch non full pools
            if i >= njobs:
                continue
            logger.info(f"Task {k} of pool {j} is job {i}")

            # Obtain current job
            simulations_settings = jobs[i]

            filepath = write_sparameters_meep_mpi(
                cores=cores_per_run,
                temp_dir=temp_dir,
                temp_file_str=f"write_sparameters_meep_mpi_{i}",
                wait_to_finish=False,
                **simulations_settings,
            )
            filepaths.append(filepath)
            all_filepaths.append(filepath)

            # Increment task number
            i += 1

        # Wait for pool to end
        done = False
        num_pool_jobs = len(filepaths)
        while not done:
            # Check if all jobs finished
            jobs_done = 0
            for filepath in filepaths:
                if filepath.exists():
                    jobs_done += 1
            if jobs_done == num_pool_jobs:
                done = True
            else:
                time.sleep(1)

    if delete_temp_files:
        shutil.rmtree(temp_dir)
    return all_filepaths
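
A minimal usage sketch, assuming gdsfactory is importable; `gf.components.straight` and `gf.components.bend_circular` stand in for whatever components you want to simulate, and any other write_sparameters_meep settings can go in each job dict:

import gdsfactory as gf

# Two jobs with 2 cores each fit in one pool of 4 total cores,
# so both simulations run in parallel.
jobs = [
    dict(component=gf.components.straight(length=5)),
    dict(component=gf.components.bend_circular(radius=10)),
]
filepaths = write_sparameters_meep_mpi_pool(
    jobs=jobs,
    cores_per_run=2,
    total_cores=4,
)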
Example #22
def place_and_write(filepath_yaml,
                    root_does=CONFIG["cache_doe_directory"],
                    filepath_gds="top_level.gds"):
    """Places components from a YAML file and writes the top level to GDS."""
    c = place_from_yaml(filepath_yaml, root_does)
    logger.info(f"Writing {filepath_gds} ...")
    c.write(filepath_gds)
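
A usage sketch, assuming a placement YAML file mask.yml exists (hypothetical filename):

place_and_write("mask.yml", filepath_gds="mask_top_level.gds")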
Example #23
def component_grid_from_yaml(filepath: Path,
                             precision: float = 1e-9) -> Component:
    """Returns a Component composed of DOEs/components given in a yaml file
    allows for each DOE to have its own x and y spacing (more flexible than method1)
    """
    input_does = OmegaConf.load(str(filepath))
    mask_settings = input_does["mask"]
    does = read_sweep(filepath)

    placed_doe = None
    placed_does = {}
    if mask_settings.get("name"):
        component_grid = gf.Component(mask_settings["name"])
    else:
        component_grid = gf.Component()

    default_cache_enabled = (mask_settings["cache_enabled"]
                             if "cache_enabled" in mask_settings else False)
    for doe_name, doe in does.items():
        list_settings = doe["settings"]
        component_type = doe["component"]

        # description = sweep["description"] if "description" in sweep else ''
        # test = sweep["test"] if "test" in sweep else {}
        # analysis = sweep["analysis"] if "analysis" in sweep else {}

        # Get DOE policy concerning the cache
        cache_enabled = (doe["cache_enabled"]
                         if "cache_enabled" in doe else default_cache_enabled)

        components = None

        # If cache enabled, attempt to load from cache
        if cache_enabled:
            try:
                components = load_doe_from_cache(doe_name)
            except Exception as e:
                logger.error(e)
                components = None

        # If no components were loaded, build them
        if components is None:
            logger.info(f"{doe_name} - Generating components...")
            components = build_components(component_type, list_settings)

            # After building the components, if cache enabled, save them
            if cache_enabled:
                save_doe(doe_name, components, precision=precision)
        else:
            logger.info(f"{doe_name} - Loaded components from cache")

        # logger.info(doe_name, [c.name for c in components])
        # Find placer information

        default_settings = {"align_x": "W", "align_y": "S", "margin": 10}

        if "placer" in doe:
            placer_type = doe["placer"].pop("type")
            _placer = PLACER_NAME2FUNC[placer_type]

            # All other attributes are assumed to be settings for the placer
            settings = default_settings.copy()
            settings.update(doe["placer"])

            # x0, y0 can either be float or string
            x0 = settings.pop("x0")
            y0 = settings.pop("y0")

            # Check whether we are doing relative or absolute placement
            if (x0 in ["E", "W"] or y0 in ["N", "S"]) and not placed_doe:
                raise ValueError(
                    "At least one DOE must be placed to use relative placement"
                )

            # For relative placement (to previous DOE)
            if "margin_x" not in settings:
                settings["margin_x"] = settings["margin"]
            if "margin_y" not in settings:
                settings["margin_y"] = settings["margin"]

            margin_x = settings["margin_x"]
            margin_y = settings["margin_y"]
            align_x = settings["align_x"]
            align_y = settings["align_y"]

            # Making sure that the alignment is sensible depending on how we stack

            # If we specify a DOE to place next to, use it
            if "next_to" in settings:
                placed_doe = placed_does[settings.pop("next_to")]

            # Otherwise, use previously placed DOE as starting point
            if x0 == "E":
                x0 = placed_doe.size_info.east
                if align_x == "W":
                    x0 += margin_x

            if x0 == "W":
                x0 = placed_doe.size_info.west
                if align_x == "E":
                    x0 -= margin_x

            if y0 == "N":
                y0 = placed_doe.size_info.north
                if align_y == "S":
                    y0 += margin_y

            if y0 == "S":
                y0 = placed_doe.size_info.south
                if align_y == "N":
                    y0 -= margin_y

            # Add x0, y0 in settings as float
            settings["x0"] = x0
            settings["y0"] = y0

            placed_components = _placer(components, **settings)
        else:
            # If no placer is specified, we assume this is a grid placer
            cols, rows = doe["shape"]
            x0, y0 = doe["origin"]
            dx, dy = doe["spacing"]

            placed_components = placer_grid_cell_refs(components,
                                                      cols,
                                                      rows,
                                                      dx,
                                                      dy,
                                                      x0=x0,
                                                      y0=y0)

        # Place components within a cell having the DOE name
        placed_doe = gf.Component()
        placed_doe.add(placed_components)
        placed_doe.name = doe_name
        placed_does[doe_name] = placed_doe

        # # Write the json and md metadata / report
        # write_doe_metadata(
        # doe_name=doe_name,
        # cells = placed_components,
        # list_settings=list_settings,
        # flag_write_component=False,
        # description=description,
        # test=test,
        # analysis=analysis
        # )

        component_grid.add_ref(placed_doe)

    return component_grid
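
A hypothetical end-to-end sketch. The exact YAML schema is defined by read_sweep, so the field names below simply mirror the keys the function reads (mask, component, settings, and the grid keys shape/origin/spacing used when no placer is given):

from pathlib import Path

# Hypothetical sweep file using the grid-placer branch (no "placer" section).
yaml_text = """
mask:
  name: demo_mask
  cache_enabled: false
spiral_doe:
  component: spiral
  settings:
    length: [2, 4, 6]
  shape: [3, 1]
  origin: [0, 0]
  spacing: [100, 100]
"""
filepath = Path("mask_does.yml")
filepath.write_text(yaml_text)

grid = component_grid_from_yaml(filepath)
grid.write_gds("demo_mask.gds")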
Example #24
import meep as mp

from gdsfactory.config import logger
from gdsfactory.simulation.gmeep.write_sparameters_meep import (
    write_sparameters_meep,
    write_sparameters_meep_lr,
    write_sparameters_meep_lt,
)
from gdsfactory.simulation.gmeep.write_sparameters_meep_mpi import (
    write_sparameters_meep_mpi,
    write_sparameters_meep_mpi_lr,
    write_sparameters_meep_mpi_lt,
)
from gdsfactory.simulation.gmeep.write_sparameters_meep_mpi_pool import (
    write_sparameters_meep_mpi_pool,
    write_sparameters_meep_mpi_pool_lr,
    write_sparameters_meep_mpi_pool_lt,
)

logger.info(f"Meep {mp.__version__!r} installed at {mp.__path__!r}")

__all__ = [
    "get_simulation",
    "get_sparameters_data_meep",
    "write_sparameters_meep",
    "write_sparameters_meep_lr",
    "write_sparameters_meep_lt",
    "write_sparameters_meep_mpi",
    "write_sparameters_meep_mpi_lr",
    "write_sparameters_meep_mpi_lt",
    "write_sparameters_meep_mpi_pool",
    "write_sparameters_meep_mpi_pool_lr",
    "write_sparameters_meep_mpi_pool_lt",
    "plot",
    "port_symmetries",
Example #25
def import_gds(
    gdspath: Union[str, Path],
    cellname: Optional[str] = None,
    flatten: bool = False,
    snap_to_grid_nm: Optional[int] = None,
    name: Optional[str] = None,
    decorator: Optional[Callable] = None,
    gdsdir: Optional[Union[str, Path]] = None,
    **kwargs,
) -> Component:
    """Returns a Componenent from a GDS file.

    Adapted from phidl/geometry.py

    if any cell names are found on the component CACHE we append a $ with a
    number to the name

    Args:
        gdspath: path of GDS file.
        cellname: cell of the name to import (None) imports top cell.
        flatten: if True returns flattened (no hierarchy)
        snap_to_grid_nm: snap to different nm grid (does not snap if False)
        name: Optional name. Over-rides the default imported name.
        decorator: function to apply over the imported gds.
        gdsdir: optional GDS directory.
        kwargs: settings for the imported component (polarization, wavelength ...).
    """
    gdspath = Path(gdsdir) / Path(gdspath) if gdsdir else Path(gdspath)
    if not gdspath.exists():
        raise FileNotFoundError(f"No file {gdspath!r} found")

    metadata_filepath = gdspath.with_suffix(".yml")

    gdsii_lib = gdspy.GdsLibrary()
    gdsii_lib.read_gds(str(gdspath))
    top_level_cells = gdsii_lib.top_level()
    cellnames = [c.name for c in top_level_cells]

    if cellname is not None:
        if cellname not in gdsii_lib.cells:
            raise ValueError(
                f"cell {cellname} is not in file {gdspath} with cells {cellnames}"
            )
        topcell = gdsii_lib.cells[cellname]
    elif cellname is None and len(top_level_cells) == 1:
        topcell = top_level_cells[0]
    elif cellname is None and len(top_level_cells) > 1:
        raise ValueError(
            f"import_gds() There are multiple top-level cells in {gdspath!r}, "
            f"you must specify `cellname` to select one of them from {cellnames}"
        )

    if name:
        if name in CACHE or name in CACHE_IMPORTED_CELLS:
            raise ValueError(
                f"name = {name!r} already on cache. "
                "Please choose a different name or set name=None."
            )
        else:
            topcell.name = name

    if flatten:
        component = Component(name=name or cellname or cellnames[0])
        polygons = topcell.get_polygons(by_spec=True)

        for layer_in_gds, polys in polygons.items():
            component.add_polygon(polys, layer=layer_in_gds)

        component = avoid_duplicated_cells(component)

    else:
        D_list = []
        cell_to_device = {}
        for c in gdsii_lib.cells.values():
            D = Component(name=c.name)
            D.polygons = c.polygons
            D.references = c.references
            D.name = c.name
            for label in c.labels:
                rotation = label.rotation
                if rotation is None:
                    rotation = 0
                label_ref = D.add_label(
                    text=label.text,
                    position=np.asfarray(label.position),
                    magnification=label.magnification,
                    rotation=rotation * 180 / np.pi,
                    layer=(label.layer, label.texttype),
                )
                label_ref.anchor = label.anchor

            D = avoid_duplicated_cells(D)
            D.unlock()

            cell_to_device.update({c: D})
            D_list += [D]

        for D in D_list:
            # First convert each reference so it points to the right Device
            converted_references = []
            for e in D.references:
                ref_device = cell_to_device[e.ref_cell]
                if isinstance(e, gdspy.CellReference):
                    dr = DeviceReference(
                        device=ref_device,
                        origin=e.origin,
                        rotation=e.rotation,
                        magnification=e.magnification,
                        x_reflection=e.x_reflection,
                    )
                    dr.owner = D
                    converted_references.append(dr)
                elif isinstance(e, gdspy.CellArray):
                    dr = CellArray(
                        device=ref_device,
                        columns=e.columns,
                        rows=e.rows,
                        spacing=e.spacing,
                        origin=e.origin,
                        rotation=e.rotation,
                        magnification=e.magnification,
                        x_reflection=e.x_reflection,
                    )
                    dr.owner = D
                    converted_references.append(dr)
            D.references = converted_references

            # Next convert each Polygon
            temp_polygons = list(D.polygons)
            D.polygons = []
            for p in temp_polygons:
                if snap_to_grid_nm:
                    points_on_grid = snap_to_grid(p.polygons[0],
                                                  nm=snap_to_grid_nm)
                    p = gdspy.Polygon(points_on_grid,
                                      layer=p.layers[0],
                                      datatype=p.datatypes[0])
                D.add_polygon(p)
        component = cell_to_device[topcell]
        component = cast(Component, component)

    name = name or component.name
    component.name = name

    if metadata_filepath.exists():
        logger.info(f"Read YAML metadata from {metadata_filepath}")
        metadata = OmegaConf.load(metadata_filepath)

        for port_name, port in metadata.ports.items():
            if port_name not in component.ports:
                component.add_port(
                    name=port_name,
                    midpoint=port.midpoint,
                    width=port.width,
                    orientation=port.orientation,
                    layer=port.layer,
                    port_type=port.port_type,
                )

        component.info = metadata.info

    component.info.update(**kwargs)
    component.info.name = name

    if decorator:
        component_new = decorator(component)
        component = component_new or component
    if flatten:
        component.flatten()
    component.lock()
    return component
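
A minimal usage sketch, assuming a file demo.gds (hypothetical) containing a single top cell:

# Snap polygon points to a 1 nm grid and give the cell an explicit name.
c = import_gds("demo.gds", snap_to_grid_nm=1, name="demo_imported")
print(c.name)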