Example #1
def avoid_duplicated_cells(c: Component) -> Component:
    """Ensures import_gds cells do not create duplicated cell names
    with the ones in CACHE.
    if component in CACHE or CACHE_IMPORTED_CELLS we get it from there

    """

    # rename the cell if its name is already in any cache
    if c.name in CACHE or c.name in CACHE_IMPORTED_CELLS:
        i = 1
        new_name = f"{c.name}${i}"
        while new_name in CACHE or new_name in CACHE_IMPORTED_CELLS:
            i += 1
            new_name = f"{c.name}${i}"

        c.name = new_name
        CACHE_IMPORTED_CELLS[c.name] = c

    # if the name is not in any cache, register it in CACHE_IMPORTED_CELLS
    else:
        CACHE_IMPORTED_CELLS[c.name] = c
    return c
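
A minimal standalone sketch of the renaming behaviour, using plain dicts in place of gdsfactory's CACHE and CACHE_IMPORTED_CELLS and a stand-in class instead of Component (all names below are illustrative, not the library API):

CACHE = {"mzi": object()}  # pretend a cell named "mzi" is already cached
CACHE_IMPORTED_CELLS = {}


class FakeCell:
    """Stand-in for Component; illustration only."""

    def __init__(self, name):
        self.name = name


def rename_if_duplicated(c):
    """Same renaming logic as avoid_duplicated_cells, against the toy caches above."""
    if c.name in CACHE or c.name in CACHE_IMPORTED_CELLS:
        i = 1
        new_name = f"{c.name}${i}"
        while new_name in CACHE or new_name in CACHE_IMPORTED_CELLS:
            i += 1
            new_name = f"{c.name}${i}"
        c.name = new_name
    CACHE_IMPORTED_CELLS[c.name] = c
    return c


print(rename_if_duplicated(FakeCell("mzi")).name)  # -> mzi$1
print(rename_if_duplicated(FakeCell("mzi")).name)  # -> mzi$2
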
Example #2
def import_gds(
    gdspath: Union[str, Path],
    cellname: Optional[str] = None,
    flatten: bool = False,
    snap_to_grid_nm: Optional[int] = None,
    decorator: Optional[Callable] = None,
    **kwargs,
) -> Component:
    """Returns a Componenent from a GDS file.

    Adapted from phidl/geometry.py

    Args:
        gdspath: path of the GDS file.
        cellname: name of the cell to import. None imports the top cell.
        flatten: if True, returns a flattened component (no hierarchy).
        snap_to_grid_nm: snap points to a different nm grid (does not snap if None).
        kwargs: extra attributes set on the imported component.
    """
    gdspath = Path(gdspath)
    if not gdspath.exists():
        raise FileNotFoundError(f"No file {gdspath} found")
    gdsii_lib = gdspy.GdsLibrary()
    gdsii_lib.read_gds(str(gdspath))
    top_level_cells = gdsii_lib.top_level()
    cellnames = [c.name for c in top_level_cells]

    if cellname is not None:
        if cellname not in gdsii_lib.cells:
            raise ValueError(
                f"cell {cellname} is not in file {gdspath} with cells {cellnames}"
            )
        topcell = gdsii_lib.cells[cellname]
    elif cellname is None and len(top_level_cells) == 1:
        topcell = top_level_cells[0]
    elif cellname is None and len(top_level_cells) > 1:
        raise ValueError(
            f"import_gds() There are multiple top-level cells in {gdspath}, "
            f"you must specify `cellname` to select of one of them among {cellnames}"
        )
    if flatten:
        component = Component()
        polygons = topcell.get_polygons(by_spec=True)

        for layer_in_gds, polys in polygons.items():
            component.add_polygon(polys, layer=layer_in_gds)

    else:
        D_list = []
        c2dmap = {}
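        # First pass: wrap every gdspy cell in a Component,
        # carrying over its polygons, references and labels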
        for cell in gdsii_lib.cells.values():
            D = Component(name=cell.name)
            D.polygons = cell.polygons
            D.references = cell.references
            D.name = cell.name
            for label in cell.labels:
                rotation = label.rotation
                if rotation is None:
                    rotation = 0
                label_ref = D.add_label(
                    text=label.text,
                    position=np.asfarray(label.position),
                    magnification=label.magnification,
                    rotation=rotation * 180 / np.pi,
                    layer=(label.layer, label.texttype),
                )
                label_ref.anchor = label.anchor
            c2dmap.update({cell: D})
            D_list += [D]

        for D in D_list:
            # First convert each reference so it points to the right Device
            converted_references = []
            for e in D.references:
                ref_device = c2dmap[e.ref_cell]
                if isinstance(e, gdspy.CellReference):
                    dr = DeviceReference(
                        device=ref_device,
                        origin=e.origin,
                        rotation=e.rotation,
                        magnification=e.magnification,
                        x_reflection=e.x_reflection,
                    )
                    dr.owner = D
                    converted_references.append(dr)
                elif isinstance(e, gdspy.CellArray):
                    dr = CellArray(
                        device=ref_device,
                        columns=e.columns,
                        rows=e.rows,
                        spacing=e.spacing,
                        origin=e.origin,
                        rotation=e.rotation,
                        magnification=e.magnification,
                        x_reflection=e.x_reflection,
                    )
                    dr.owner = D
                    converted_references.append(dr)
            D.references = converted_references

            # Next convert each Polygon
            temp_polygons = list(D.polygons)
            D.polygons = []
            for p in temp_polygons:
                if snap_to_grid_nm:
                    points_on_grid = snap_to_grid(p.polygons[0], nm=snap_to_grid_nm)
                    p = gdspy.Polygon(
                        points_on_grid, layer=p.layers[0], datatype=p.datatypes[0]
                    )
                D.add_polygon(p)
        component = c2dmap[topcell]
        component = cast(Component, component)
    for key, value in kwargs.items():
        setattr(component, key, value)
    if decorator:
        decorator(component)
    component._autoname = False
    return component
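
A usage sketch for this version, assuming import_gds and its dependencies are importable and that a file mzi.gds with a single top-level cell exists on disk; the file name and the extra keyword argument are illustrative:

c = import_gds(
    "mzi.gds",          # hypothetical GDS file
    cellname=None,      # None -> import the single top-level cell
    flatten=False,      # keep the cell hierarchy
    snap_to_grid_nm=1,  # snap polygon points to a 1 nm grid
    polarization="te",  # extra kwargs become attributes via setattr
)
print(c.name)
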
Example #3
def import_gds(
    gdspath: Union[str, Path],
    cellname: Optional[str] = None,
    flatten: bool = False,
    snap_to_grid_nm: Optional[int] = None,
    name: Optional[str] = None,
    decorator: Optional[Callable] = None,
    gdsdir: Optional[Union[str, Path]] = None,
    **kwargs,
) -> Component:
    """Returns a Componenent from a GDS file.

    Adapted from phidl/geometry.py

    If any imported cell name is already in the Component CACHE, we append
    $ and a number to the name.

    Args:
        gdspath: path of the GDS file.
        cellname: name of the cell to import. None imports the top cell.
        flatten: if True, returns a flattened component (no hierarchy).
        snap_to_grid_nm: snap points to a different nm grid (does not snap if None).
        name: optional name that overrides the default imported name.
        decorator: function to apply to the imported component.
        gdsdir: optional GDS directory.
        kwargs: settings for the imported component (polarization, wavelength ...).
    """
    gdspath = Path(gdsdir) / Path(gdspath) if gdsdir else Path(gdspath)
    if not gdspath.exists():
        raise FileNotFoundError(f"No file {gdspath!r} found")

    metadata_filepath = gdspath.with_suffix(".yml")

    gdsii_lib = gdspy.GdsLibrary()
    gdsii_lib.read_gds(str(gdspath))
    top_level_cells = gdsii_lib.top_level()
    cellnames = [c.name for c in top_level_cells]

    if cellname is not None:
        if cellname not in gdsii_lib.cells:
            raise ValueError(
                f"cell {cellname} is not in file {gdspath} with cells {cellnames}"
            )
        topcell = gdsii_lib.cells[cellname]
    elif cellname is None and len(top_level_cells) == 1:
        topcell = top_level_cells[0]
    elif cellname is None and len(top_level_cells) > 1:
        raise ValueError(
            f"import_gds() There are multiple top-level cells in {gdspath!r}, "
            f"you must specify `cellname` to select of one of them among {cellnames}"
        )

    if name:
        if name in CACHE or name in CACHE_IMPORTED_CELLS:
            raise ValueError(
                f"name = {name!r} is already in the cache. "
                "Please choose a different name or set name=None."
            )
        else:
            topcell.name = name

    if flatten:
        component = Component(name=name or cellname or cellnames[0])
        polygons = topcell.get_polygons(by_spec=True)

        for layer_in_gds, polys in polygons.items():
            component.add_polygon(polys, layer=layer_in_gds)

        component = avoid_duplicated_cells(component)

    else:
        D_list = []
        cell_to_device = {}
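        # First pass: wrap every gdspy cell in a Component,
        # carrying over its polygons, references and labels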
        for c in gdsii_lib.cells.values():
            D = Component(name=c.name)
            D.polygons = c.polygons
            D.references = c.references
            D.name = c.name
            for label in c.labels:
                rotation = label.rotation
                if rotation is None:
                    rotation = 0
                label_ref = D.add_label(
                    text=label.text,
                    position=np.asfarray(label.position),
                    magnification=label.magnification,
                    rotation=rotation * 180 / np.pi,
                    layer=(label.layer, label.texttype),
                )
                label_ref.anchor = label.anchor

            D = avoid_duplicated_cells(D)
            D.unlock()

            cell_to_device.update({c: D})
            D_list += [D]

        for D in D_list:
            # First convert each reference so it points to the right Device
            converted_references = []
            for e in D.references:
                ref_device = cell_to_device[e.ref_cell]
                if isinstance(e, gdspy.CellReference):
                    dr = DeviceReference(
                        device=ref_device,
                        origin=e.origin,
                        rotation=e.rotation,
                        magnification=e.magnification,
                        x_reflection=e.x_reflection,
                    )
                    dr.owner = D
                    converted_references.append(dr)
                elif isinstance(e, gdspy.CellArray):
                    dr = CellArray(
                        device=ref_device,
                        columns=e.columns,
                        rows=e.rows,
                        spacing=e.spacing,
                        origin=e.origin,
                        rotation=e.rotation,
                        magnification=e.magnification,
                        x_reflection=e.x_reflection,
                    )
                    dr.owner = D
                    converted_references.append(dr)
            D.references = converted_references

            # Next convert each Polygon
            temp_polygons = list(D.polygons)
            D.polygons = []
            for p in temp_polygons:
                if snap_to_grid_nm:
                    points_on_grid = snap_to_grid(p.polygons[0],
                                                  nm=snap_to_grid_nm)
                    p = gdspy.Polygon(points_on_grid,
                                      layer=p.layers[0],
                                      datatype=p.datatypes[0])
                D.add_polygon(p)
        component = cell_to_device[topcell]
        component = cast(Component, component)

    name = name or component.name
    component.name = name

    if metadata_filepath.exists():
        logger.info(f"Read YAML metadata from {metadata_filepath}")
        metadata = OmegaConf.load(metadata_filepath)

        for port_name, port in metadata.ports.items():
            if port_name not in component.ports:
                component.add_port(
                    name=port_name,
                    midpoint=port.midpoint,
                    width=port.width,
                    orientation=port.orientation,
                    layer=port.layer,
                    port_type=port.port_type,
                )

        component.info = metadata.info

    component.info.update(**kwargs)
    component.name = name
    component.info.name = name

    if decorator:
        component_new = decorator(component)
        component = component_new or component
    if flatten:
        component.flatten()
    component.lock()
    return component
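
A usage sketch for this extended version, assuming gds/mzi.gds exists on disk; if a sidecar gds/mzi.yml is present, its ports and info are loaded as well. The decorator and the keyword argument below are illustrative, not part of the library:

def add_info(component):
    """Hypothetical decorator: annotate the imported component."""
    component.info.update(source="imported")
    return component


c = import_gds(
    "mzi.gds",
    gdsdir="gds",         # resolved as gds/mzi.gds
    name="mzi_imported",  # overrides the imported cell name
    decorator=add_info,   # applied after import; may return a new component
    wavelength=1.55,      # stored in component.info via kwargs
)
print(c.name, c.info)
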
Example #4
def extrude(
    p: Path,
    cross_section: Optional[CrossSectionOrFactory] = None,
    layer: Optional[Layer] = None,
    width: Optional[float] = None,
    widths: Optional[Float2] = None,
    simplify: Optional[float] = None,
) -> Component:
    """Returns Component extruding a Path with a cross_section.

    A Path can be extruded with any CrossSection, returning a Component.

    The CrossSection defines the layer numbers, widths and offsets.

    Adapted from phidl.path

    Args:
        p: Path to extrude (built from arc, straight, euler, ...).
        cross_section: CrossSection to extrude along the path.
        layer: layer to extrude on; used with width or widths when no
          cross_section is given.
        width: constant extrusion width (requires layer).
        widths: tuple of start and end widths for a linear transition (requires layer).
        simplify: Tolerance value for the simplification algorithm.
          All points that can be removed without changing the resulting
          polygon by more than the value listed here will be removed.
    """
    if cross_section is None and layer is None:
        raise ValueError("CrossSection or layer needed")

    if cross_section is not None and layer is not None:
        raise ValueError("Define only CrossSection or layer")

    if layer is not None and width is None and widths is None:
        raise ValueError("Need to define layer width or widths")
    elif width:
        cross_section = CrossSection()
        cross_section.add(width=width, layer=layer)

    elif widths:
        cross_section = CrossSection()
        cross_section.add(width=_linear_transition(widths[0], widths[1]), layer=layer)

    xsection_points = []
    c = Component()

    cross_section = cross_section() if callable(cross_section) else cross_section
    snap_to_grid = cross_section.info.get("snap_to_grid", None)

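    # Extrude each cross_section section into its own polygon and ports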
    for section in cross_section.sections:
        width = section["width"]
        offset = section["offset"]
        layer = section["layer"]
        ports = section["ports"]
        port_types = section["port_types"]
        hidden = section["hidden"]

        if isinstance(width, (int, float)) and isinstance(offset, (int, float)):
            xsection_points.append([width, offset])
        if isinstance(layer, int):
            layer = (layer, 0)
        if (
            isinstance(layer, Iterable)
            and len(layer) == 2
            and isinstance(layer[0], int)
            and isinstance(layer[1], int)
        ):
            xsection_points.append([layer[0], layer[1]])

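        # A callable offset varies along the path: offset the centerline first,
        # then extrude the width symmetrically around that offset curve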
        if callable(offset):
            P_offset = p.copy()
            P_offset.offset(offset)
            points = P_offset.points
            start_angle = P_offset.start_angle
            end_angle = P_offset.end_angle
            offset = 0
        else:
            points = p.points
            start_angle = p.start_angle
            end_angle = p.end_angle

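        # A callable width varies along the path: evaluate it at
        # normalized arc-length positions (0 at the start, 1 at the end)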
        if callable(width):
            # Compute lengths
            dx = np.diff(p.points[:, 0])
            dy = np.diff(p.points[:, 1])
            lengths = np.cumsum(np.sqrt((dx) ** 2 + (dy) ** 2))
            lengths = np.concatenate([[0], lengths])
            width = width(lengths / lengths[-1])
        else:
            pass

        points1 = p._centerpoint_offset_curve(
            points,
            offset_distance=offset + width / 2,
            start_angle=start_angle,
            end_angle=end_angle,
        )
        points2 = p._centerpoint_offset_curve(
            points,
            offset_distance=offset - width / 2,
            start_angle=start_angle,
            end_angle=end_angle,
        )

        # Simplify lines using the Ramer–Douglas–Peucker algorithm
        if isinstance(simplify, bool):
            raise ValueError(
                "[PHIDL] the simplify argument must be a number (e.g. 1e-3) or None"
            )
        if simplify is not None:
            points1 = _simplify(points1, tolerance=simplify)
            points2 = _simplify(points2, tolerance=simplify)

        if snap_to_grid:
            snap_to_grid_nm = snap_to_grid * 1e3
            points1 = (
                snap_to_grid_nm
                * np.round(np.array(points1) * 1e3 / snap_to_grid_nm)
                / 1e3
            )
            points2 = (
                snap_to_grid_nm
                * np.round(np.array(points2) * 1e3 / snap_to_grid_nm)
                / 1e3
            )

        # Join points together
        points = np.concatenate([points1, points2[::-1, :]])

        layers = layer if hidden else [layer, layer]
        if not hidden and p.length() > 1e-3:
            c.add_polygon(points, layer=layer)
        # Add ports if they were specified
        if ports[0] is not None:
            orientation = (p.start_angle + 180) % 360
            _width = width if np.isscalar(width) else width[0]
            new_port = c.add_port(
                name=ports[0],
                layer=layers[0],
                port_type=port_types[0],
                width=_width,
                orientation=orientation,
                cross_section=cross_section.cross_sections[0]
                if hasattr(cross_section, "cross_sections")
                else cross_section,
            )
            new_port.endpoints = (points1[0], points2[0])
        if ports[1] is not None:
            orientation = (p.end_angle + 180) % 360
            _width = width if np.isscalar(width) else width[-1]
            new_port = c.add_port(
                name=ports[1],
                layer=layers[1],
                port_type=port_types[1],
                width=_width,
                orientation=orientation,
                cross_section=cross_section.cross_sections[1]
                if hasattr(cross_section, "cross_sections")
                else cross_section,
            )
            new_port.endpoints = (points2[-1], points1[-1])

    points = np.concatenate((p.points, np.array(xsection_points)))
    c.name = f"path_{hash_points(points)[:26]}"
    # c.path = path
    # c.cross_section = cross_section
    return c
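
A usage sketch, assuming a phidl/gdsfactory-style path factory such as straight and a CrossSection factory such as strip are available in the namespace; both names are assumptions, not taken from the code above:

p = straight(length=10)                           # a 10 um long straight Path
c1 = extrude(p, layer=(1, 0), width=0.5)          # constant 0.5 um width on layer (1, 0)
c2 = extrude(p, layer=(1, 0), widths=(0.5, 2.0))  # linear transition from 0.5 to 2.0 um
c3 = extrude(p, cross_section=strip)              # full CrossSection: layers, offsets, ports
print(c1.name, c2.name, c3.name)
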