def __init__(self, hamtide_dataset_directory: PathLike = None):
    if hamtide_dataset_directory is None:
        # no local copy given; fall back to remote OPeNDAP access
        hamtide_dataset_directory = self.OPENDAP_URL
    else:
        try:
            if Path(hamtide_dataset_directory).exists():
                hamtide_dataset_directory = Path(hamtide_dataset_directory)
                if len(list(hamtide_dataset_directory.glob('*.nc'))) == 0:
                    raise FileNotFoundError(
                        f'no NetCDF files found at '
                        f'"{hamtide_dataset_directory}"'
                    )
        except OSError:
            raise ValueError('given resource must be a local path')

    super().__init__(hamtide_dataset_directory)

    # lazily-populated registry: one entry per constituent for each variable
    datasets = {'elevation': {}, 'velocity': {}}
    for variable in datasets:
        datasets[variable].update({
            constituent.lower(): {'path': None, 'dataset': None}
            for constituent in self.constituents
        })
    self.datasets = datasets
def get_latest_json_file_name(
    prefix: str = "", path: PathLike = JSON_DATA_PATH
) -> PathLike:
    """Return the latest JSON file in ``path`` whose name contains ``prefix``."""
    path = Path(path)
    try:
        # max() over creation time picks the most recently created match;
        # an empty glob makes max() raise ValueError
        return max(path.glob(f"*{prefix}*.json"), key=getctime)
    except ValueError:
        raise NoMatchingDataPathError(path=path, prefix=prefix)
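# A minimal usage sketch: the directory "data" and the prefix "stations" are
# hypothetical, and JSON_DATA_PATH / NoMatchingDataPathError are assumed to be
# defined in the surrounding module. Note that getctime-based ordering means
# "latest" refers to filesystem metadata time, not to anything in the name.
try:
    latest = get_latest_json_file_name(prefix="stations", path="data")
    print(f"most recent match: {latest}")
except NoMatchingDataPathError:
    print("no JSON files matching the prefix under data/")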
def is_exist_gbk(acc: str, search_root: PathLike) -> bool:
    """Check whether a GenBank (.gbk) file already exists.

    Args:
        acc (str): accession number (a trailing version suffix is ignored).
        search_root (PathLike): the root directory to search.

    Returns:
        bool: True if a matching ``.gbk`` file is found under ``search_root``.
    """
    search_root = Path(search_root).resolve()
    # drop the version suffix, e.g. "NC_000913.3" -> "NC_000913"
    acc = acc.split(".")[0]
    return bool(list(search_root.glob(f"**/{acc}.gbk")))
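# A minimal usage sketch: the accession "NC_000913.3" and the directory
# "genbank" are hypothetical. Because the version suffix is stripped before
# globbing, any version of the accession already on disk counts as existing.
if not is_exist_gbk("NC_000913.3", "genbank"):
    print("GenBank record not found locally; fetch it before proceeding")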
def __init__(self, hamtide_dataset_directory: PathLike = None):
    if hamtide_dataset_directory is not None:
        hamtide_dataset_directory = Path(hamtide_dataset_directory)
        # guard against pointing at a directory with no NetCDF data
        if len(list(hamtide_dataset_directory.glob('*.nc'))) == 0:
            raise FileNotFoundError(
                f'no NetCDF files found at "{hamtide_dataset_directory}"'
            )
    else:
        # no local copy given; read from the remote OPeNDAP server
        hamtide_dataset_directory = self.OPENDAP_URL

    super().__init__(hamtide_dataset_directory)

    # lazily-populated registry: one entry per constituent for each variable
    datasets = {'elevation': {}, 'velocity': {}}
    for variable in datasets:
        datasets[variable].update({
            constituent.lower(): {'path': None, 'dataset': None}
            for constituent in self.constituents
        })
    self.datasets = datasets
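# A hedged usage sketch covering both constructor variants above: the class
# name HAMTIDE and its OPENDAP_URL / constituents attributes are assumptions
# inferred from the method body, not confirmed by the source. With no
# argument the data is read remotely over OPeNDAP; with a local directory,
# the *.nc check guards against an empty or wrong path.
hamtide = HAMTIDE()                    # remote OPeNDAP access
# hamtide = HAMTIDE('/data/hamtide')   # hypothetical local NetCDF directory
print(sorted(hamtide.datasets['elevation']))  # constituent keys, lowercased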