def test_invalid_accesspoint(self):
    # Accessing an unknown access point attribute must raise immediately:
    with pytest.raises(InvalidFetcherAccessPoint):
        ArgoDataFetcher(src=self.src).invalid_accesspoint.to_xarray()  # Can't get data if access point not defined first
    # Fetching without selecting any access point first must also fail:
    with pytest.raises(InvalidFetcher):
        ArgoDataFetcher(src=self.src).to_xarray()  # Can't get data if access point not defined first
def test_invalid_accesspoint():
    # Use the first valid data source
    src = next(iter(AVAILABLE_SOURCES.keys()))
    # An unknown access point attribute must be rejected:
    with pytest.raises(InvalidFetcherAccessPoint):
        ArgoDataFetcher(src=src).invalid_accesspoint.to_xarray()  # Can't get data if access point not defined first
    # Fetching before any access point is selected must be rejected too:
    with pytest.raises(InvalidFetcherAccessPoint):
        ArgoDataFetcher(src=src).to_xarray()  # Can't get data if access point not defined first
def test_invalidFTPpath(self):
    # A local ftp root that does not exist must raise a ValueError:
    with pytest.raises(ValueError):
        with argopy.set_options(local_ftp="dummy"):
            ArgoDataFetcher(src=self.src).profile(2901623, 12)
    # Pointing inside the ftp tree (at the "dac" folder) instead of its
    # root must raise a FtpPathError:
    with pytest.raises(FtpPathError):
        with argopy.set_options(local_ftp=os.path.sep.join([self.local_ftp, "dac"])):
            ArgoDataFetcher(src=self.src).profile(2901623, 12)
def test_clear_cache():
    # After clear_cache(), the cache folder itself survives but is empty.
    ftproot, flist = argopy.tutorial.open_dataset("localftp")
    with tempfile.TemporaryDirectory() as cachedir:
        with argopy.set_options(cachedir=cachedir, local_ftp=ftproot):
            # Populate the cache with two distinct requests:
            for cyc in (12, 13):
                ArgoDataFetcher(src="localftp").profile(2902696, cyc).to_xarray()
            argopy.clear_cache()
            assert os.path.exists(cachedir) is True
            assert len(os.listdir(cachedir)) == 0
def init_data(self):
    # Fetch real data to test interpolation
    box = [-75, -55, 30., 40., 0, 100., '2011-01-01', '2011-01-15']
    try:
        self.ds_pts_standard = (
            ArgoDataFetcher(src='erddap', mode='standard').region(box).to_xarray()
        )
        self.ds_pts_expert = (
            ArgoDataFetcher(src='erddap', mode='expert').region(box).to_xarray()
        )
    except ErddapServerError:
        # Test is passed when something goes wrong because of the erddap server, not our fault !
        pass
    except ValueError:
        # Catches value error for incorrect standard levels as inputs
        pass
def __testthis_region(self, dataset):
    """Fetch every 'region' access argument and validate the resulting dataset.

    Parameters
    ----------
    dataset: str
        Dataset short name passed to the fetcher (``ds=`` argument).

    ErddapServerError is tolerated (server-side problem, not ours).  Any
    other failure is logged with the offending request URL and re-raised:
    the previous ``except Exception: print(...); pass`` swallowed genuine
    failures, including the AssertionError from the isinstance check.
    """
    for arg in self.args['region']:
        try:
            ds = ArgoDataFetcher(src=self.src, ds=dataset).region(arg).to_xarray()
            assert isinstance(ds, xr.Dataset)
        except ErddapServerError:
            # Test is passed when something goes wrong because of the erddap server, not our fault !
            pass
        except Exception:
            # Log the failing request for debugging, then let pytest see it:
            print(
                "ERDDAP request:\n",
                ArgoDataFetcher(src=self.src, ds=dataset).region(arg).fetcher.url)
            raise
def test_cachepath_notfound(self):
    """Asking for the cache path before any fetch must raise CacheFileNotFound.

    Cleanup is in a ``finally`` block: the original called
    ``shutil.rmtree`` only on the success path, leaking the test cache dir
    whenever the assertion failed.
    """
    try:
        with argopy.set_options(cachedir=self.testcachedir):
            loader = ArgoDataFetcher(src=self.src, cache=True).profile(6902746, 34)
            with pytest.raises(CacheFileNotFound):
                loader.fetcher.cachepath
    finally:
        # Make sure the cache is left empty, even on failure; ignore_errors
        # covers the case where the directory was never created.
        shutil.rmtree(self.testcachedir, ignore_errors=True)
def test_nocache(self):
    # With caching disabled, asking for a cache path must fail even
    # though a cachedir option is set.
    with argopy.set_options(cachedir="dummy"):
        fetcher = ArgoDataFetcher(src=self.src, cache=False).profile(6902746, 34)
        fetcher.to_xarray()
        with pytest.raises(FileSystemHasNoCache):
            fetcher.fetcher.cachepath
def __testthis_profile(self, dataset):
    """Check every 'profile' access argument against the local ftp source."""
    with argopy.set_options(local_ftp=self.local_ftp):
        opts = {"src": self.src, 'ds': dataset}
        for access_arg in self.args['profile']:
            fetcher = ArgoDataFetcher(**opts).profile(*access_arg)
            assert isinstance(fetcher.to_xarray(), xr.Dataset)
            assert is_list_of_strings(fetcher.fetcher.uri)
def test_nocache(self):
    # Caching disabled: the fetcher must report it has no cache path.
    with argopy.set_options(cachedir="dummy", local_ftp=self.local_ftp):
        fetcher = ArgoDataFetcher(src=self.src, cache=False).profile(2901623, 12)
        fetcher.to_xarray()
        with pytest.raises(FileSystemHasNoCache):
            fetcher.fetcher.cachepath
def test_chunks_region(self):
    # Parallel fetching with explicit chunking along each region dimension.
    chunks = {'lon': 1, 'lat': 2, 'dpt': 1, 'time': 2}
    with argopy.set_options(local_ftp=self.local_ftp):
        for access_arg in self.requests["region"]:
            fetcher = ArgoDataFetcher(
                src=self.src, parallel=True, chunks=chunks).region(access_arg)
            assert isinstance(fetcher.to_xarray(), xr.Dataset)
            assert is_list_of_strings(fetcher.fetcher.uri)
def __testthis_float(self, dataset):
    """Fetch every 'float' access argument and validate the resulting dataset.

    Parameters
    ----------
    dataset: str
        Dataset short name passed to the fetcher (``ds=`` argument).

    ErddapServerError is tolerated.  Any other failure is logged with the
    file list of the failing request and re-raised: the previous
    ``except Exception: print(...); pass`` silently hid real failures,
    including the AssertionError from the isinstance check.
    """
    with argopy.set_options(local_ftp=self.local_ftp):
        for arg in self.args['float']:
            try:
                ds = ArgoDataFetcher(src=self.src, ds=dataset).float(arg).to_xarray()
                assert isinstance(ds, xr.Dataset)
            except ErddapServerError:
                # Test is passed when something goes wrong because of the erddap server, not our fault !
                pass
            except Exception:
                # Log the failing request for debugging, then let pytest see it:
                print(
                    "ERROR LOCALFTP request:\n",
                    ArgoDataFetcher(src=self.src, ds=dataset).float(arg).fetcher.files)
                raise
def __test_float(self, bk, **ftc_opts):
    """ Test float for a given backend """
    for access_arg in self.args["float"]:
        for user_mode in self.mode:
            # Per-call options override the instance defaults:
            merged_opts = dict(self.fetcher_opts)
            merged_opts.update(ftc_opts)
            fetcher = ArgoDataFetcher(src=bk, mode=user_mode, **merged_opts).float(access_arg)
            self.__assert_fetcher(fetcher)
def __test_region(self, bk):
    """ Test float for a given backend """
    for access_arg in self.args["region"]:
        for user_mode in self.mode:
            fetcher = ArgoDataFetcher(src=bk, mode=user_mode).region(access_arg)
            assert isinstance(fetcher.to_xarray(), xr.Dataset)
            assert is_list_of_strings(fetcher.uri)
def __test_float(self, bk, **ftc_opts):
    """ Test float for a given backend """
    for access_arg in self.args["float"]:
        for user_mode in self.mode:
            # Per-call options override the instance defaults:
            merged_opts = dict(self.fetcher_opts, **ftc_opts)
            fetcher = ArgoDataFetcher(src=bk, mode=user_mode, **merged_opts).float(access_arg)
            assert isinstance(fetcher.to_xarray(), xr.Dataset)
            assert is_list_of_strings(fetcher.uri)
def __test_region(self, bk):
    """ Test float for a given backend """
    for access_arg in self.args['region']:
        try:
            result = ArgoDataFetcher(src=bk).region(access_arg).to_xarray()
            assert isinstance(result, xr.Dataset)
        except ErddapServerError:
            # Test is passed when something goes wrong because of the erddap server, not our fault !
            pass
def test_clearcache(self):
    # After clear_cache(), the cache path for this fetch must be gone.
    with tempfile.TemporaryDirectory() as cachedir:
        with argopy.set_options(cachedir=cachedir, local_ftp=self.local_ftp):
            fetcher = ArgoDataFetcher(src=self.src, cache=True).float(2901623)
            fetcher.to_xarray()
            fetcher.clear_cache()
            with pytest.raises(CacheFileNotFound):
                fetcher.fetcher.cachepath
def test_point2profile2point():
    # Round-trip points -> profiles -> points must be the identity.
    box = [-75, -55, 30., 40., 0, 100., '2011-01-01', '2011-01-15']
    try:
        ds_pts = ArgoDataFetcher(src='erddap').region(box).to_xarray()
        assert ds_pts.argo.point2profile().argo.profile2point().equals(ds_pts)
    except ErddapServerError:
        # Test is passed when something goes wrong because of the erddap server, not our fault !
        pass
def test_cachepath_notfound(self):
    # Before any data are fetched, asking for the cache path must fail.
    with tempfile.TemporaryDirectory() as cachedir:
        with argopy.set_options(cachedir=cachedir):
            fetcher = ArgoDataFetcher(src=self.src, cache=True).profile(
                *self.requests['profile'][0])
            with pytest.raises(CacheFileNotFound):
                fetcher.fetcher.cachepath
def __get_fetcher(self, empty: bool = False, pt: str = 'profile'):
    """Return a bare fetcher and one configured for a given access point.

    Parameters
    ----------
    empty: bool
        When True, the configured fetcher targets a request known to
        return no data (dummy WMO / empty depth window); otherwise a
        request known to return data.
    pt: str
        Access point to configure: 'float', 'profile' or 'region'.

    Returns
    -------
    tuple of (ArgoDataFetcher, ArgoDataFetcher)

    Raises
    ------
    ValueError
        If ``pt`` is not a supported access point (the original silently
        fell through and returned None, which crashed callers later).
    """
    f = ArgoDataFetcher(src=self.src)  # f.valid_access_points[0]
    if pt == 'float':
        if not empty:
            return f, ArgoDataFetcher(src=self.src).float(2901623)
        return f, ArgoDataFetcher(src=self.src).float(12)
    if pt == 'profile':
        if not empty:
            return f, ArgoDataFetcher(src=self.src).profile(2901623, 12)
        return f, ArgoDataFetcher(src=self.src).profile(12, 1200)
    if pt == 'region':
        if not empty:
            return f, ArgoDataFetcher(src=self.src).region([
                -60, -55, 40.0, 45.0, 0.0, 10.0, "2007-08-01", "2007-09-01"
            ])
        return f, ArgoDataFetcher(src=self.src).region([
            -60, -55, 40.0, 45.0, 99.92, 99.99, "2007-08-01", "2007-08-01"
        ])
    raise ValueError("Unknown access point: %s" % pt)
def test_nocache(self):
    # Caching disabled: the fetcher must report it has no cache path.
    with tempfile.TemporaryDirectory() as cachedir:
        with argopy.set_options(cachedir=cachedir):
            fetcher = ArgoDataFetcher(src=self.src, cache=False).profile(
                *self.requests['profile'][0])
            fetcher.to_xarray()
            with pytest.raises(FileSystemHasNoCache):
                fetcher.fetcher.cachepath
def test_clear_cache():
    """argopy.clear_cache() must remove the cache directory entirely.

    The cleanup is in a ``finally`` block: the original left a stray
    ``~/.argopytest_tmp`` folder behind whenever the fetch or the
    assertion failed.
    """
    # Fetch data to cache:
    ftproot, flist = argopy.tutorial.open_dataset('localftp')
    testcachedir = os.path.expanduser(os.path.join("~", ".argopytest_tmp"))
    try:
        with argopy.set_options(cachedir=testcachedir, local_ftp=ftproot):
            ArgoDataFetcher(src='localftp').profile(2902696, 12).to_xarray()
            # Then clean it:
            argopy.clear_cache()
            assert os.path.isdir(testcachedir) is False
    finally:
        # Don't leave a stray cache dir behind on failure; ignore_errors
        # since on success the dir is already gone.
        shutil.rmtree(testcachedir, ignore_errors=True)
def test_chunks_wmo(self):
    # Parallel fetching chunked by WMO: expect one URI per float requested.
    with argopy.set_options(local_ftp=self.local_ftp):
        opts = {"src": self.src, "parallel": True, "chunks_maxsize": {'wmo': 1}}
        for access_arg in self.requests["wmo"]:
            fetcher = ArgoDataFetcher(**opts).profile(access_arg, 1)
            assert isinstance(fetcher.to_xarray(), xr.Dataset)
            assert is_list_of_strings(fetcher.fetcher.uri)
            assert len(fetcher.fetcher.uri) == len(access_arg)
def __test_float(self, bk, **ftc_opts):
    """ Test float for a given backend """
    for access_arg in self.args['float']:
        # Per-call options override the instance defaults:
        merged_opts = dict(self.fetcher_opts, **ftc_opts)
        try:
            result = ArgoDataFetcher(src=bk, **merged_opts).float(access_arg).to_xarray()
            assert isinstance(result, xr.Dataset)
        except ErddapServerError:
            # Test is passed when something goes wrong because of the erddap server, not our fault !
            pass
def test_caching(self):
    """Fetching the same profile twice with cache=True must return a Dataset.

    The second call is expected to be served from the cache directory.
    Rewritten with ``try/finally``: the original duplicated the
    ``shutil.rmtree`` call in a bare ``except:`` clause, and used the
    ``isinstance(...) == True`` anti-idiom.

    NOTE(review): this test passes ``backend='erddap'`` while sibling
    tests use ``src=`` — confirm which keyword the fetcher expects here.
    """
    cachedir = os.path.expanduser(os.path.join("~", ".argopytest_tmp"))
    try:
        # 1st call to load from erddap and save to cachedir:
        ds = ArgoDataFetcher(backend='erddap', cache=True, cachedir=cachedir).profile(6902746, 34).to_xarray()
        # 2nd call to load from cached file
        ds = ArgoDataFetcher(backend='erddap', cache=True, cachedir=cachedir).profile(6902746, 34).to_xarray()
        assert isinstance(ds, xr.Dataset)
    finally:
        # Always remove the cache dir; ignore_errors covers the case where
        # it was never created (failure before the first fetch completed).
        shutil.rmtree(cachedir, ignore_errors=True)
def test_profile2point():
    # Calling profile2point() on a point-indexed dataset must be rejected.
    box = [-75, -55, 30., 40., 0, 100., '2011-01-01', '2011-01-15']
    try:
        ds = ArgoDataFetcher(src='erddap').region(box).to_xarray()
        with pytest.raises(InvalidDatasetStructure):
            ds.argo.profile2point()
    except ErddapServerError:
        # Test is passed when something goes wrong because of the erddap server, not our fault !
        pass
def test_caching_profile(self):
    """Two identical cached fetches must both return a Dataset.

    ErddapServerError is tolerated (server-side issue, not ours).  The
    cache directory is removed in a ``finally`` block: the original
    duplicated the ``shutil.rmtree`` call across an ErddapServerError
    handler and a bare ``except:``, and a missing directory would have
    raised from ``rmtree`` itself, masking the real error —
    ``ignore_errors=True`` prevents that.
    """
    cachedir = os.path.expanduser(os.path.join("~", ".argopytest_tmp"))
    try:
        # 1st call to load from erddap and save to cachedir:
        ds = ArgoDataFetcher(src='erddap', cache=True, cachedir=cachedir).profile(6902746, 34).to_xarray()
        # 2nd call to load from cached file
        ds = ArgoDataFetcher(src='erddap', cache=True, cachedir=cachedir).profile(6902746, 34).to_xarray()
        assert isinstance(ds, xr.Dataset)
    except ErddapServerError:
        # Test is passed when something goes wrong because of the erddap server, not our fault !
        pass
    finally:
        shutil.rmtree(cachedir, ignore_errors=True)
def test_methods(self):
    wmo = self.requests["wmo"][0]
    # Every supported parallelization method must yield a working fetcher:
    valid_args = [
        {"src": self.src, "parallel": "thread"},
        {"src": self.src, "parallel": True, "parallel_method": "thread"},
        {"src": self.src, "parallel": "process"},
        {"src": self.src, "parallel": True, "parallel_method": "process"},
    ]
    with argopy.set_options(local_ftp=self.local_ftp):
        for fetcher_args in valid_args:
            loader = ArgoDataFetcher(**fetcher_args).float(wmo)
            assert isinstance(loader, argopy.fetchers.ArgoDataFetcher)
    # ... while an unknown method name must raise a ValueError:
    invalid_args = [
        {"src": self.src, "parallel": "toto"},
        {"src": self.src, "parallel": True, "parallel_method": "toto"},
    ]
    with argopy.set_options(local_ftp=self.local_ftp):
        for fetcher_args in invalid_args:
            with pytest.raises(ValueError):
                ArgoDataFetcher(**fetcher_args).float(wmo)
def test_caching_profile(self):
    # A second fetch of the same profile must be served from the cache.
    with tempfile.TemporaryDirectory() as cachedir:
        with argopy.set_options(cachedir=cachedir, local_ftp=self.local_ftp):
            fetcher = ArgoDataFetcher(src=self.src, cache=True).profile(2901623, 1)
            fetcher.to_xarray()  # 1st call: load and save to cachedir
            ds = fetcher.to_xarray()  # 2nd call: load from cached file
            assert isinstance(ds, xr.Dataset)
            assert is_list_of_strings(fetcher.fetcher.uri)
            assert is_list_of_strings(fetcher.fetcher.cachepath)
def test_caching_region(self):
    # A second fetch of the same region must be served from the cache.
    with tempfile.TemporaryDirectory() as cachedir:
        with argopy.set_options(cachedir=cachedir):
            fetcher = ArgoDataFetcher(src=self.src, cache=True).region(
                self.requests['region'][1]).fetcher
            fetcher.to_xarray()  # 1st call: load and save to cachedir
            ds = fetcher.to_xarray()  # 2nd call: load from cached file
            assert isinstance(ds, xr.Dataset)
            assert is_list_of_strings(fetcher.uri)
            assert is_list_of_strings(fetcher.cachepath)