def test_creation(self):
    """An indexstore must be instantiable with every supported option combination."""
    stores = [
        indexstore(),
        indexstore(cache=1),
        indexstore(cache=1, cachedir="."),
        indexstore(index_file="toto.txt"),
    ]
    for store in stores:
        assert isinstance(store, argopy.stores.argo_index.indexstore)
def init(self, box: list):
    """ Create Argo data loader

        Parameters
        ----------
        box : list(float, float, float, float, float, float, str, str)
            The box domain to load all Argo data for:
            box = [lon_min, lon_max, lat_min, lat_max, pres_min, pres_max, datim_min, datim_max]
    """
    # A full (x, y, z, t) domain definition is accepted for compatibility with
    # the other fetchers, but only x, y and t are actually used here.
    if len(box) not in [6, 8]:
        raise ValueError('Box must be 6 or 8 length')
    # Keep the lon/lat bounds, plus the time bounds when they are provided:
    if len(box) == 8:
        self.BOX = box[:4] + box[6:8]
    else:
        self.BOX = box[:4]
    self.fs_index = indexstore(
        self.cache,
        self.cachedir,
        os.path.sep.join([self.local_ftp, "ar_index_global_prof.txt"]),
    )
def init(self, box: list):
    """ Create Argo data loader

        Parameters
        ----------
        box : list()
            The box domain to load all Argo data for, with one of the following convention:
            - box = [lon_min, lon_max, lat_min, lat_max, pres_min, pres_max]
            - box = [lon_min, lon_max, lat_min, lat_max, pres_min, pres_max, datim_min, datim_max]

        Raises
        ------
        ValueError
            If ``box`` is not 6 or 8 elements long.
    """
    # We use a full domain definition (x, y, z, t) as argument for compatibility
    # with the other fetchers, but at this point we internally work only with x, y and t.
    # Validate the box length up front, consistent with the sibling fetchers
    # (previously a malformed box silently produced a lon/lat-only index box):
    if len(box) not in [6, 8]:
        raise ValueError('Box must be 6 or 8 length')
    self.BOX = box
    # The index search only needs lon/lat (and time bounds when given):
    self.indexBOX = [box[ii] for ii in [0, 1, 2, 3]]
    if len(box) == 8:
        self.indexBOX = [box[ii] for ii in [0, 1, 2, 3, 6, 7]]
    self.fs_index = indexstore(
        self.cache,
        self.cachedir,
        os.path.sep.join([self.local_ftp, "ar_index_global_prof.txt"]))
def init(self, box: list = None):
    """ Create Argo data loader

        Parameters
        ----------
        box : list(float, float, float, float, float, float, str, str), optional
            The box domain to load all Argo data for (defaults to the whole
            globe, all pressures and all times):
            box = [lon_min, lon_max, lat_min, lat_max, pres_min, pres_max, datim_min, datim_max]

        Raises
        ------
        ValueError
            If ``box`` is not 6 or 8 elements long.
    """
    # Avoid a mutable default argument (shared across calls); resolve the
    # global-domain default here instead. Effective default is unchanged.
    if box is None:
        box = [-180, 180, -90, 90, 0, 6000, '1900-01-01', '2100-12-31']
    # We use a full domain definition (x, y, z, t) as argument for compatibility
    # with the other fetchers, but we work only with x, y and t.
    if len(box) not in [6, 8]:
        raise ValueError('Box must be 6 or 8 length')
    if len(box) == 6:
        self.BOX = [box[ii] for ii in [0, 1, 2, 3]]
    elif len(box) == 8:
        self.BOX = [box[ii] for ii in [0, 1, 2, 3, 6, 7]]
    self.fs_index = indexstore(
        self.cache,
        self.cachedir,
        os.path.sep.join([self.local_ftp, "ar_index_global_prof.txt"]))
def __init__(self,
             local_ftp: str = "",
             index_file: str = "ar_index_global_prof.txt",
             cache: bool = False,
             cachedir: str = "",
             **kwargs):
    """ Init fetcher

        Parameters
        ----------
        local_ftp : str
            Path to the directory with the 'dac' folder and index file.
        index_file : str
            Name of the Argo index file to read.
        cache : bool
            Whether to cache index requests.
        cachedir : str
            Directory where cached files are stored.
    """
    self.definition = 'Local ftp Argo index fetcher'
    self.cache = cache
    # Fall back on the global option when no explicit path was given:
    self.local_ftp = local_ftp if local_ftp != '' else OPTIONS['local_ftp']
    check_localftp(self.local_ftp, errors='raise')  # Validate local_ftp
    self.index_file = index_file
    self.fs = indexstore(cache,
                         cachedir,
                         os.path.sep.join([self.local_ftp, self.index_file]))
    self.dataset_id = 'index'
    self.init(**kwargs)
def test_search_box(self):
    """Each box filter parameter set must yield a pandas DataFrame.

    Iterates over ``self.kwargs_box`` and checks the index store returns a
    DataFrame for every box search.
    """
    for kw in self.kwargs_box:
        # Use a boolean cache flag for consistency with the sibling tests
        # (0 == False, so behavior is unchanged):
        df = indexstore(cache=False, index_file=self.index_file).open_dataframe(
            indexfilter_box(**kw))
        assert isinstance(df, pd.core.frame.DataFrame)
def test_search_wmo(self):
    """Each WMO filter parameter set must yield a pandas DataFrame."""
    for params in self.kwargs_wmo:
        # A fresh, uncached store per search, as in the other store tests:
        store = indexstore(cache=False, index_file=self.index_file)
        table = store.read_csv(indexfilter_wmo(**params))
        assert isinstance(table, pd.core.frame.DataFrame)