def load_component(gdspath: PosixPath) -> Component:
    """Return a Component imported from ``gdspath``.

    Restores ports from a sibling ``.ports`` CSV file and cell settings from a
    sibling ``.json`` metadata file, when those files exist.

    Args:
        gdspath: path to the GDS file.

    Returns:
        Component with any recorded ports and settings attached.

    Raises:
        FileNotFoundError: if ``gdspath`` does not exist.
    """
    if not gdspath.exists():
        raise FileNotFoundError(f"No such file '{gdspath}'")

    ports_filepath = gdspath.with_suffix(".ports")
    metadata_filepath = gdspath.with_suffix(".json")

    c = pp.import_gds(gdspath)

    if ports_filepath.exists():
        # Each CSV row is: name, x, y, orientation, width, layer, datatype,
        # where the layer tuple was serialized as two columns like "(1" / "0)".
        # pathlib paths are accepted by open() directly; no str() needed.
        with open(ports_filepath, newline="") as csvfile:
            reader = csv.reader(csvfile, delimiter=",", quotechar="|")
            for r in reader:
                layer_type = int(r[5].strip().strip("("))
                data_type = int(r[6].strip().strip(")"))
                c.add_port(
                    name=r[0],
                    midpoint=[float(r[1]), float(r[2])],
                    orientation=int(r[3]),
                    width=float(r[4]),
                    layer=(layer_type, data_type),
                )

    if metadata_filepath.exists():
        with open(metadata_filepath) as f:
            data = json.load(f)
        # NOTE(review): raises KeyError if the JSON has no entry for this
        # cell name — presumably the metadata is always written per-cell;
        # confirm against the writer before softening this.
        cell_settings = data["cells"][c.name]
        c.settings.update(cell_settings)
    return c
def merge_metadata(
    gdspath: PosixPath,
    labels_prefix: str = "opt",
    label_layer: Tuple[int, int] = pp.LAYER.LABEL,
    **kwargs,
):
    """Merge all JSON metadata for a GDS build into one JSON file.

    Writes the labels for ``gdspath``, then merges the per-DOE JSON and
    markdown reports next to the GDS, and finally merges the test metadata.

    Args:
        gdspath: path to the GDS file.
        labels_prefix: prefix of the labels to extract.
        label_layer: layer for the labels.
        **kwargs: forwarded to ``merge_json``.
    """
    markdown_path = gdspath.with_suffix(".md")
    json_path = gdspath.with_suffix(".json")
    # DOE reports live under <build>/doe, two levels up from the GDS file.
    doe_directory = gdspath.parent.parent / "doe"

    write_labels(gdspath=gdspath, prefix=labels_prefix, label_layer=label_layer)
    merge_json(doe_directory=doe_directory, jsonpath=json_path, **kwargs)
    merge_markdown(reports_directory=doe_directory, mdpath=markdown_path)
    merge_test_metadata(gdspath, labels_prefix=labels_prefix)
def lock(path: pathlib.PosixPath):
    """Hold an exclusive advisory lock on a sibling ``.lock`` file.

    Generator-based context manager body: creates ``path.with_suffix('.lock')``
    if needed, takes an exclusive ``fcntl`` lock on it, yields the open lock
    file, and releases the lock on exit. (The lock file is a separate file
    from the one at the passed path.)

    Args:
        path: path to the file being protected from use by other processes.

    Yields:
        The open lock-file handle while the lock is held.
    """
    lock_path = path.with_suffix(".lock")
    # touch(exist_ok=True) replaces the racy exists()/touch() pair: another
    # process could create the file between the check and the touch (TOCTOU).
    lock_path.touch(exist_ok=True)
    with lock_path.open("r+") as lock_file:
        try:
            fcntl.lockf(lock_file.fileno(), fcntl.LOCK_EX)
            yield lock_file
        finally:
            # Always release, even if the caller's body raised.
            fcntl.lockf(lock_file.fileno(), fcntl.LOCK_UN)