def test_chunks_region(self):
     with argopy.set_options(local_ftp=self.local_ftp):
         fetcher_args = {"src": self.src, "parallel": True, 'chunks': {'lon': 1, 'lat': 2, 'dpt': 1, 'time': 2}}
         for access_arg in self.requests["region"]:
             f = ArgoDataFetcher(**fetcher_args).region(access_arg)
             assert isinstance(f.to_xarray(), xr.Dataset)
             assert is_list_of_strings(f.fetcher.uri)
Example No. 2
 def test_nocache(self):
     with argopy.set_options(cachedir="dummy"):
         loader = ArgoDataFetcher(src=self.src,
                                  cache=False).profile(6902746, 34)
         loader.to_xarray()
         with pytest.raises(FileSystemHasNoCache):
             loader.fetcher.cachepath
Example No. 3
 def test_nocache(self):
     with argopy.set_options(cachedir="dummy", local_ftp=self.local_ftp):
         loader = ArgoDataFetcher(src=self.src,
                                  cache=False).profile(2901623, 12)
         loader.to_xarray()
         with pytest.raises(FileSystemHasNoCache):
             loader.fetcher.cachepath
Example No. 4
 def __test_region(self, bk):
     """ Test float for a given backend """
     for arg in self.args["region"]:
         for mode in self.mode:
             f = ArgoDataFetcher(src=bk, mode=mode).region(arg)
             assert isinstance(f.to_xarray(), xr.Dataset)
             assert is_list_of_strings(f.uri)
 def __testthis_profile(self, dataset):
     with argopy.set_options(local_ftp=self.local_ftp):
         fetcher_args = {"src": self.src, 'ds': dataset}
         for arg in self.args['profile']:
             f = ArgoDataFetcher(**fetcher_args).profile(*arg)
             assert isinstance(f.to_xarray(), xr.Dataset)
             assert is_list_of_strings(f.fetcher.uri)
 def test_clearcache(self):
     with tempfile.TemporaryDirectory() as testcachedir:
         with argopy.set_options(cachedir=testcachedir, local_ftp=self.local_ftp):
             loader = ArgoDataFetcher(src=self.src, cache=True).float(2901623)
             loader.to_xarray()
             loader.clear_cache()
             with pytest.raises(CacheFileNotFound):
                 loader.fetcher.cachepath
Example No. 7
 def __test_float(self, bk, **ftc_opts):
     """ Test float for a given backend """
     for arg in self.args["float"]:
         for mode in self.mode:
             options = {**self.fetcher_opts, **ftc_opts}
             f = ArgoDataFetcher(src=bk, mode=mode, **options).float(arg)
             assert isinstance(f.to_xarray(), xr.Dataset)
             assert is_list_of_strings(f.uri)
 def test_chunks_wmo(self):
     with argopy.set_options(local_ftp=self.local_ftp):
         fetcher_args = {"src": self.src, "parallel": True, "chunks_maxsize": {'wmo': 1}}
         for access_arg in self.requests["wmo"]:
             # f = ArgoDataFetcher(**fetcher_args).float(access_arg)
             f = ArgoDataFetcher(**fetcher_args).profile(access_arg, 1)
             assert isinstance(f.to_xarray(), xr.Dataset)
             assert is_list_of_strings(f.fetcher.uri)
             assert len(f.fetcher.uri) == len(access_arg)
 def test_nocache(self):
     with tempfile.TemporaryDirectory() as testcachedir:
         with argopy.set_options(cachedir=testcachedir):
             loader = ArgoDataFetcher(
                 src=self.src,
                 cache=False).profile(*self.requests['profile'][0])
             loader.to_xarray()
             with pytest.raises(FileSystemHasNoCache):
                 loader.fetcher.cachepath
 def test_caching_profile(self):
     with tempfile.TemporaryDirectory() as testcachedir:
         with argopy.set_options(cachedir=testcachedir, local_ftp=self.local_ftp):
             loader = ArgoDataFetcher(src=self.src, cache=True).profile(2901623, 1)
             # 1st call to load and save to cachedir:
             loader.to_xarray()
             # 2nd call to load from cached file
             ds = loader.to_xarray()
             assert isinstance(ds, xr.Dataset)
             assert is_list_of_strings(loader.fetcher.uri)
             assert is_list_of_strings(loader.fetcher.cachepath)
 def test_chunks_wmo(self):
     for access_arg in self.requests["wmo"]:
         fetcher_args = {
             "src": self.src,
             "parallel": True,
             "chunks_maxsize": {
                 "wmo": 1
             },
         }
         f = ArgoDataFetcher(**fetcher_args).profile(access_arg, 12).fetcher
         assert isinstance(f.to_xarray(), xr.Dataset)
         assert is_list_of_strings(f.uri)
         assert len(f.uri) == len(access_arg)
Example No. 12
def test_clear_cache():
    ftproot, flist = argopy.tutorial.open_dataset("localftp")
    with tempfile.TemporaryDirectory() as cachedir:
        with argopy.set_options(cachedir=cachedir, local_ftp=ftproot):
            loader = ArgoDataFetcher(src="localftp",
                                     cache=True).profile(2902696, 12)
            loader.to_xarray()  # 1st call to load from source and save in memory
            loader.to_xarray()  # 2nd call to load from memory and save in cache
            argopy.clear_cache()
            assert os.path.exists(cachedir) is True
            assert len(os.listdir(cachedir)) == 0
 def test_caching_region(self):
     with tempfile.TemporaryDirectory() as testcachedir:
         with argopy.set_options(cachedir=testcachedir, local_ftp=self.local_ftp):
             loader = ArgoDataFetcher(src=self.src, cache=True).region(
                 [-60, -40, 40., 60., 0., 100., '2007-08-01', '2007-09-01']
             )
             # 1st call to load and save to cachedir:
             loader.to_xarray()
             # 2nd call to load from cached file
             ds = loader.to_xarray()
             assert isinstance(ds, xr.Dataset)
             assert is_list_of_strings(loader.fetcher.uri)
             assert is_list_of_strings(loader.fetcher.cachepath)
 def test_chunks_region(self):
     for access_arg in self.requests["region"]:
         fetcher_args = {
             "src": self.src,
             "parallel": True,
             "chunks": {
                 "lon": 1,
                 "lat": 2,
                 "dpt": 1,
                 "time": 2
             },
         }
         f = ArgoDataFetcher(**fetcher_args).region(access_arg).fetcher
         assert isinstance(f.to_xarray(), xr.Dataset)
         assert is_list_of_strings(f.uri)
         assert len(f.uri) == np.prod(
             [v for k, v in fetcher_args["chunks"].items()])
Example No. 15
 def test_caching_profile(self):
     with argopy.set_options(cachedir=self.testcachedir):
         loader = ArgoDataFetcher(src=self.src,
                                  cache=True).profile(6902746, 34)
         try:
             # 1st call to load from erddap and save to cachedir:
             ds = loader.to_xarray()
             # 2nd call to load from cached file
             ds = loader.to_xarray()
             assert isinstance(ds, xr.Dataset)
             assert isinstance(loader.fetcher.cachepath, str)
             shutil.rmtree(self.testcachedir)
         except ErddapServerError:  # Test is passed when something goes wrong because of the erddap server, not our fault !
             shutil.rmtree(self.testcachedir)
             pass
         except Exception:
             shutil.rmtree(self.testcachedir)
             raise
Example No. 16
def main():
    unsaved = []
    for i in list_float:
        try:
            argo_loader = ArgoDataFetcher()

            # access the data for one float
            ds = argo_loader.float(i).to_xarray()
            # convert to a pandas DataFrame
            df = ds.to_dataframe()

            with open(str(i) + '.csv', 'w') as f:
                df.to_csv(f, header=f.tell() == 0)

            print('> ' + str(i) + '.csv saved.')
        # some floats contain bad data and cannot be saved
        except Exception:
            print('> file could not be saved ' + str(i))
            unsaved.append(i)
            pass
    print('Number of unsaved floats:', len(unsaved))
Example No. 17
 def test_clearcache(self):
     with tempfile.TemporaryDirectory() as testcachedir:
         with argopy.set_options(cachedir=testcachedir):
             loader = ArgoDataFetcher(src=self.src, cache=True).float(
                 self.requests["float"][0])
             loader.to_xarray()  # 1st call to load from source and save in memory
             loader.to_xarray()  # 2nd call to load from memory and save in cache
             loader.clear_cache()
             with pytest.raises(CacheFileNotFound):
                 loader.fetcher.cachepath
Example No. 18
# Accessing argo data by float using Argopy

import matplotlib.pyplot as plt
from argopy import DataFetcher as ArgoDataFetcher
plt.style.use('default')

# Different ways to instantiate the fetcher (each call below overrides the previous one):
argo_loader = ArgoDataFetcher()
argo_loader = ArgoDataFetcher(backend='erddap')
argo_loader = ArgoDataFetcher(cachedir='tmp')

apDS = argo_loader.float(6901254).to_xarray()

apDS2 = argo_loader.profile(6901254, 1).to_xarray()

data = apDS2.argo.point2profile()
data

The core-Argo profile files contain the core parameters provided by a float: pressure, temperature, salinity, and conductivity (PRES, TEMP, PSAL, CNDC). All additional parameters are managed in the B-Argo data files (see §0).
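
As a quick check of that split, here is a minimal sketch (assuming the apDS Dataset fetched above) listing which of those core parameters are actually present in the fetched data:

# Minimal sketch: check which core-Argo parameters are present in the Dataset
# fetched above (assumes apDS from the float fetch; names follow the Argo convention).
core_params = ['PRES', 'TEMP', 'PSAL', 'CNDC']
present = [p for p in core_params if p in apDS.data_vars]
missing = [p for p in core_params if p not in apDS.data_vars]
print('core parameters found:', present)
print('core parameters not in this file:', missing)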

fig, ax = plt.subplots(1,2,figsize=(8,10))

#Temperature
ax[0].plot(data.TEMP[0],-data.PRES[0],'ro',label='N_PROF=0 D Fetcher')
ax[0].plot(data.TEMP[1],-data.PRES[1],'bo',label='N_PROF=1 A Fetcher')
# cy1D and cy1 below come from earlier cells of the original notebook (not shown here)
ax[0].plot(cy1D.TEMP[0],-cy1D.PRES[0],'k-',label='N_PROF=0 D')
ax[0].set_title(cy1.TEMP.long_name)
ax[0].set_ylabel(cy1.PRES.long_name)
ax[0].grid()
ax[0].legend();

ax[1].plot(data.PSAL[0],-data.PRES[0],'ro',label='N_PROF=0 D Fetcher')
ax[1].plot(data.PSAL[1],-data.PRES[1],'bo',label='N_PROF=1 A Fetcher')
Example No. 19
argo8 = ('5903135', dt.strptime('2015-10-15 17:40:00',
                                time_format), -41.212, -56.244, 'NOc Antares',
         dt.strptime('2017-11-15 06:25:00', time_format))

buoys = [argo1, argo2, argo3, argo4, argo5, argo6, argo7, argo8]

columns_details = [
    'argo_id', 'start_date', 'latitude', 'longitude', 'ship_plataform',
    'last_date'
]
argos_details = pd.DataFrame.from_records(buoys, columns=columns_details)

### Download data from ARGO platform

argo_loader = ArgoDataFetcher()

argo_teste = argo_loader.float(list_argos)

df_argo = argo_teste.to_dataframe()

df_argo_final = df_argo[[
    'CYCLE_NUMBER', 'DIRECTION', 'PLATFORM_NUMBER', 'PRES', 'PSAL', 'TEMP',
    'TIME', 'LONGITUDE', 'LATITUDE'
]]

df_argo_final.loc[:, ['PSAL', 'TEMP']] = df_argo_final[['PSAL', 'TEMP']].round(3)

### Inserting data into the database
Example No. 20
 def test_bar_plot_profile(self):
     with argopy.set_options(local_ftp=self.local_ftp):
         for arg in self.requests["profile"]:
             loader = ArgoDataFetcher(src=self.src).profile(*arg)
             self.__test_bar_plot(loader.to_index(full=True))