def shape(self):
    """Return the grid extents as a tuple of plain Python ints.

    Each extent from ``z_info.grid.nx`` is converted via ``.item()``
    (presumably from a NumPy scalar — TODO confirm against the ZDF reader).
    """
    logging.info(f"Accessing '{self.location}' for 'shape'")
    z_info = info(str(self.location))
    # Generator inside tuple() replaces the manual append loop.
    return tuple(n.item() for n in z_info.grid.nx)
def axes_units(self):
    """Return the unit string of every grid axis as a numpy array."""
    logging.info(f"Accessing '{self.location}' for 'axes_units'")
    z_info = info(str(self.location))
    # List comprehension replaces the manual append loop.
    return np.array([axis.units for axis in z_info.grid.axis])
def quantity_labels(self) -> List[str]:
    """Return the human-readable label for each particle quantity.

    Looks up each name from ``self.quantity_names`` in the file's
    ``qlabels`` mapping.
    """
    logging.info(f"Accessing '{self.location}' for 'quantity_labels'")
    z_info = info(str(self.location))
    # List comprehension replaces the manual append loop.
    return [z_info.particles.qlabels[quant] for quant in self.quantity_names]
def axes_labels(self):
    """Return the long descriptive label of every grid axis as a numpy array."""
    logging.info(f"Accessing '{self.location}' for 'axes_labels'")
    z_info = info(str(self.location))
    # List comprehension replaces the manual append loop.
    return np.array([axis.label for axis in z_info.grid.axis])
def axes_max(self):
    """Return the upper bound of every grid axis as a numpy array."""
    logging.info(f"Accessing '{self.location}' for 'axes_max'")
    z_info = info(str(self.location))
    # List comprehension replaces the manual append loop.
    return np.array([axis.max for axis in z_info.grid.axis])
def quantity_units(self) -> List[str]:
    """Return the unit string for each particle quantity.

    Looks up each name from ``self.quantity_names`` in the file's
    ``qunits`` mapping.
    """
    logging.info(f"Accessing '{self.location}' for 'quantity_units'")
    z_info = info(str(self.location))
    # List comprehension replaces the manual append loop.
    return [z_info.particles.qunits[quant] for quant in self.quantity_names]
def axes_names(self):
    """Return a short name for every grid axis as a numpy array.

    Falls back to ``self.clean(axis.label)`` when an axis has no name
    (empty/None ``axis.name``).
    """
    logging.info(f"Accessing '{self.location}' for 'axes_names'")
    z_info = info(str(self.location))
    # List comprehension replaces the manual append loop; the temporary
    # `label` variable of the original is inlined.
    return np.array(
        [axis.name or self.clean(axis.label) for axis in z_info.grid.axis]
    )
def quantity_names(self) -> List[str]:
    """Return the names of the stored particle quantities.

    The "tag" entry is excluded — it is bookkeeping metadata, not a
    physical quantity (NOTE(review): inferred from the explicit skip in
    the original code; confirm against the ZDF format docs).
    """
    logging.info(f"Accessing '{self.location}' for 'quantity_names'")
    z_info = info(str(self.location))
    # Filtered comprehension replaces the loop with `continue`.
    return [key for key in z_info.particles.quants if key != "tag"]
def is_valid_backend(file_path: Union[Path, str]) -> bool:
    """Check whether *file_path* points to a ZDF particles file.

    Now accepts either a ``Path`` or a ``str`` (consistent with the grid
    backend's ``is_valid_backend``); any other type is rejected.
    Widening the accepted input type is backward-compatible.
    """
    if isinstance(file_path, str):
        file_path = Path(file_path)
    if not isinstance(file_path, Path):
        return False
    if not file_path.is_file():
        return False
    if file_path.suffix != ".zdf":
        return False
    z_info = info(str(file_path))
    # getattr default guards against metadata without a "type" field,
    # replacing the original hasattr + nested-if.
    return getattr(z_info, "type", None) == "particles"
def is_valid_backend(file_path: Union[Path, str]) -> bool:
    """Return True when *file_path* is an existing ``.zdf`` grid file.

    Accepts a ``Path`` or ``str``; any other type is rejected outright.
    """
    if isinstance(file_path, str):
        file_path = Path(file_path)
    # Reject anything that is not a path to an actual ".zdf" file on disk.
    if not (
        isinstance(file_path, Path)
        and file_path.is_file()
        and file_path.suffix == ".zdf"
    ):
        return False
    z_info = info(str(file_path))
    return hasattr(z_info, "type") and z_info.type == "grid"
def dataset_unit(self):
    """Unit string attached to the grid dataset."""
    logging.info(f"Accessing '{self.location}' for 'dataset_unit'")
    metadata = info(str(self.location))
    return metadata.grid.units
def ndim(self):
    """Number of dimensions of the grid."""
    logging.info(f"Accessing '{self.location}' for 'ndim'")
    return info(str(self.location)).grid.ndims
def dataset_label(self) -> str:
    """Long descriptive label of the grid dataset."""
    logging.info(f"Accessing '{self.location}' for 'dataset_label'")
    return info(str(self.location)).grid.label
def dataset_name(self) -> str:
    """Short name of the grid dataset.

    Falls back to ``self.clean(label)`` when the file stores no name
    (empty/None ``grid.name``).
    """
    logging.info(f"Accessing '{self.location}' for 'dataset_name'")
    grid = info(str(self.location)).grid
    return grid.name or self.clean(grid.label)
def time_unit(self) -> str:
    """Unit string of the simulation time axis."""
    logging.info(f"Accessing '{self.location}' for 'time_unit'")
    return info(str(self.location)).iteration.tunits
def time_step(self) -> float:
    """Simulation time of this snapshot."""
    logging.info(f"Accessing '{self.location}' for 'time_step'")
    return info(str(self.location)).iteration.t
def iteration(self) -> int:
    """Iteration number of this snapshot."""
    logging.info(f"Accessing '{self.location}' for 'iteration'")
    return info(str(self.location)).iteration.n
def dataset_name(self) -> str:
    """Name of the particles dataset as stored in the file."""
    logging.info(f"Accessing '{self.location}' for 'dataset_name'")
    return info(str(self.location)).particles.name
def num_particles(self) -> int:
    """Number of particles stored in the dataset."""
    logging.info(f"Accessing '{self.location}' for 'num_particles'")
    return info(str(self.location)).particles.nparts