def test_nocache(self):
    """A fetcher created with cache=False must expose no cache path."""
    with argopy.set_options(cachedir="dummy"):
        fetcher = ArgoDataFetcher(src=self.src, cache=False).profile(6902746, 34)
        fetcher.to_xarray()
        # Caching is off, so asking for the cache path must raise:
        with pytest.raises(FileSystemHasNoCache):
            fetcher.fetcher.cachepath
def test_nocache(self):
    """A fetcher created with cache=False must expose no cache path."""
    with argopy.set_options(cachedir="dummy", local_ftp=self.local_ftp):
        fetcher = ArgoDataFetcher(src=self.src, cache=False).profile(2901623, 12)
        fetcher.to_xarray()
        # Caching is off, so asking for the cache path must raise:
        with pytest.raises(FileSystemHasNoCache):
            fetcher.fetcher.cachepath
def test_clearcache(self):
    """clear_cache() must remove the cached file of a float request."""
    with tempfile.TemporaryDirectory() as cache_folder:
        with argopy.set_options(cachedir=cache_folder, local_ftp=self.local_ftp):
            fetcher = ArgoDataFetcher(src=self.src, cache=True).float(2901623)
            fetcher.to_xarray()  # populate the cache
            fetcher.clear_cache()
            # Once cleared, the cache path must no longer resolve:
            with pytest.raises(CacheFileNotFound):
                fetcher.fetcher.cachepath
def test_nocache(self):
    """A fetcher created with cache=False must expose no cache path."""
    with tempfile.TemporaryDirectory() as cache_folder:
        with argopy.set_options(cachedir=cache_folder):
            fetcher = ArgoDataFetcher(src=self.src, cache=False).profile(
                *self.requests['profile'][0]
            )
            fetcher.to_xarray()
            # Caching is off, so asking for the cache path must raise:
            with pytest.raises(FileSystemHasNoCache):
                fetcher.fetcher.cachepath
def test_caching_profile(self):
    """Two consecutive profile fetches must populate and reuse the cache."""
    with tempfile.TemporaryDirectory() as cache_folder:
        with argopy.set_options(cachedir=cache_folder, local_ftp=self.local_ftp):
            fetcher = ArgoDataFetcher(src=self.src, cache=True).profile(2901623, 1)
            fetcher.to_xarray()  # 1st call: load from source, save to cachedir
            ds = fetcher.to_xarray()  # 2nd call: load from the cached file
            assert isinstance(ds, xr.Dataset)
            assert is_list_of_strings(fetcher.fetcher.uri)
            assert is_list_of_strings(fetcher.fetcher.cachepath)
def test_clearcache(self):
    """clear_cache() must remove the cached file of a float request."""
    with tempfile.TemporaryDirectory() as cache_folder:
        with argopy.set_options(cachedir=cache_folder):
            fetcher = ArgoDataFetcher(src=self.src, cache=True).float(
                self.requests["float"][0]
            )
            fetcher.to_xarray()  # 1st call: load from source and save in memory
            fetcher.to_xarray()  # 2nd call: load from memory and save in cache
            fetcher.clear_cache()
            # Once cleared, the cache path must no longer resolve:
            with pytest.raises(CacheFileNotFound):
                fetcher.fetcher.cachepath
def test_caching_region(self):
    """Two consecutive region fetches must populate and reuse the cache."""
    with tempfile.TemporaryDirectory() as cache_folder:
        with argopy.set_options(cachedir=cache_folder, local_ftp=self.local_ftp):
            box = [-60, -40, 40., 60., 0., 100., '2007-08-01', '2007-09-01']
            fetcher = ArgoDataFetcher(src=self.src, cache=True).region(box)
            fetcher.to_xarray()  # 1st call: load from source, save to cachedir
            ds = fetcher.to_xarray()  # 2nd call: load from the cached file
            assert isinstance(ds, xr.Dataset)
            assert is_list_of_strings(fetcher.fetcher.uri)
            assert is_list_of_strings(fetcher.fetcher.cachepath)
def test_clear_cache():
    """argopy.clear_cache() empties the cache folder without deleting it."""
    ftproot, flist = argopy.tutorial.open_dataset("localftp")
    with tempfile.TemporaryDirectory() as cache_folder:
        with argopy.set_options(cachedir=cache_folder, local_ftp=ftproot):
            fetcher = ArgoDataFetcher(src="localftp", cache=True).profile(2902696, 12)
            fetcher.to_xarray()  # 1st call: load from source and save in memory
            fetcher.to_xarray()  # 2nd call: load from memory and save in cache
            argopy.clear_cache()
            # The folder itself must survive, but be left empty:
            assert os.path.exists(cache_folder)
            assert len(os.listdir(cache_folder)) == 0
def __testthis_profile(self, dataset):
    """Run every configured 'profile' access pattern against *dataset*."""
    with argopy.set_options(local_ftp=self.local_ftp):
        options = {"src": self.src, 'ds': dataset}
        for access_arg in self.args['profile']:
            fetcher = ArgoDataFetcher(**options).profile(*access_arg)
            assert isinstance(fetcher.to_xarray(), xr.Dataset)
            assert is_list_of_strings(fetcher.fetcher.uri)
def test_chunks_region(self):
    """Parallel region fetching with explicit chunk counts must return a Dataset."""
    with argopy.set_options(local_ftp=self.local_ftp):
        options = {
            "src": self.src,
            "parallel": True,
            'chunks': {'lon': 1, 'lat': 2, 'dpt': 1, 'time': 2},
        }
        for access_arg in self.requests["region"]:
            fetcher = ArgoDataFetcher(**options).region(access_arg)
            assert isinstance(fetcher.to_xarray(), xr.Dataset)
            assert is_list_of_strings(fetcher.fetcher.uri)
def __test_region(self, bk):
    """Test region fetching for a given backend across all user modes.

    (Docstring fixed: the original said "Test float", a copy-paste error —
    this helper exercises ``.region()``, not ``.float()``.)
    """
    for arg in self.args["region"]:
        for mode in self.mode:
            f = ArgoDataFetcher(src=bk, mode=mode).region(arg)
            assert isinstance(f.to_xarray(), xr.Dataset)
            assert is_list_of_strings(f.uri)
def __test_float(self, bk, **ftc_opts):
    """Test float fetching for a given backend across all user modes."""
    for access_arg in self.args["float"]:
        for mode in self.mode:
            # Per-call options override the class-level defaults:
            merged = dict(self.fetcher_opts)
            merged.update(ftc_opts)
            fetcher = ArgoDataFetcher(src=bk, mode=mode, **merged).float(access_arg)
            assert isinstance(fetcher.to_xarray(), xr.Dataset)
            assert is_list_of_strings(fetcher.uri)
def test_caching_profile(self):
    """Two consecutive erddap fetches must populate and reuse the cache.

    The cache directory is always removed afterwards (single ``finally``
    replaces the three duplicated ``shutil.rmtree`` calls of the original).
    An erddap server failure is not our fault and must not fail the test.
    """
    with argopy.set_options(cachedir=self.testcachedir):
        loader = ArgoDataFetcher(src=self.src, cache=True).profile(6902746, 34)
        try:
            # 1st call to load from erddap and save to cachedir:
            loader.to_xarray()
            # 2nd call to load from cached file:
            ds = loader.to_xarray()
            assert isinstance(ds, xr.Dataset)
            assert isinstance(loader.fetcher.cachepath, str)
        except ErddapServerError:
            # Test is passed when something goes wrong because of the
            # erddap server, not our fault !
            pass
        finally:
            # Cleanup happens exactly once, on every exit path:
            shutil.rmtree(self.testcachedir)
def test_chunks_wmo(self):
    """Parallel WMO fetching must produce one URI per requested WMO.

    (Removed a commented-out leftover ``.float(access_arg)`` call.)
    """
    with argopy.set_options(local_ftp=self.local_ftp):
        fetcher_args = {"src": self.src, "parallel": True,
                        "chunks_maxsize": {'wmo': 1}}
        for access_arg in self.requests["wmo"]:
            f = ArgoDataFetcher(**fetcher_args).profile(access_arg, 1)
            assert isinstance(f.to_xarray(), xr.Dataset)
            assert is_list_of_strings(f.fetcher.uri)
            # One chunk per WMO => one URI per WMO requested:
            assert len(f.fetcher.uri) == len(access_arg)
def test_chunks_wmo(self):
    """Parallel WMO fetching must produce one URI per requested WMO."""
    for access_arg in self.requests["wmo"]:
        options = {"src": self.src, "parallel": True, "chunks_maxsize": {"wmo": 1}}
        fetcher = ArgoDataFetcher(**options).profile(access_arg, 12).fetcher
        assert isinstance(fetcher.to_xarray(), xr.Dataset)
        assert is_list_of_strings(fetcher.uri)
        # One chunk per WMO => one URI per WMO requested:
        assert len(fetcher.uri) == len(access_arg)
def test_chunks_region(self):
    """Parallel region fetching must produce one URI per requested chunk."""
    for access_arg in self.requests["region"]:
        options = {
            "src": self.src,
            "parallel": True,
            "chunks": {"lon": 1, "lat": 2, "dpt": 1, "time": 2},
        }
        fetcher = ArgoDataFetcher(**options).region(access_arg).fetcher
        assert isinstance(fetcher.to_xarray(), xr.Dataset)
        assert is_list_of_strings(fetcher.uri)
        # Total chunk count is the product of per-dimension chunk counts:
        assert len(fetcher.uri) == np.prod(list(options["chunks"].values()))