def test_index_cache(self):
    """Exercise the index cache with a near-immediate expiration time.

    The tiny expiration value forces the cache to be considered stale,
    so every queried source refreshes its file list from the store.
    """
    self.data_store = EsaCciOdpDataStore(
        index_cache_used=True, index_cache_expiration_days=1.0e-6)
    sources = self.data_store.query()
    self.assertIsNotNone(sources)
    for source in sources:
        source.update_file_list()
def test_make_local_spatial(self):
    """Open a remote SST subset, then materialize the same subset locally."""
    store = EsaCciOdpDataStore()
    source = store.query(
        ds_id='esacci.SST.day.L4.SSTdepth.multi-sensor.multi-platform.OSTIA.1-1.r1')[0]
    time_range = ['2010-01-01', '2010-01-04']
    region = '-10,40,20,70'
    # Opening the remote dataset directly always worked fine.
    remote_ds = source.open_dataset(time_range=time_range, region=region)
    self.assertIsNotNone(remote_ds)
    # make_local() reproduced Cate issues #823, #822, #818, #816, #783.
    local_ds = source.make_local('SST_DAY_L4', time_range=time_range, region=region)
    self.assertIsNotNone(local_ds)
def test_make_local_wo_subsets(self):
    """Materialize a data source locally without any spatial or temporal subset."""
    remote_store = EsaCciOdpDataStore()
    local_store = DATA_STORE_REGISTRY.get_data_store('local')
    collection_id = 'esacci.OZONE.mon.L3.NP.multi-sensor.multi-platform.MERGED.fv0002.r1'
    source = remote_store.query(collection_id)[0]
    # Randomized suffix keeps repeated runs from colliding on the same local name.
    suffix = random.choice(string.ascii_lowercase)
    local_name = f"test{suffix}"
    ds = source.make_local(local_name)
    self.assertIsNotNone(ds)
    local_store.remove_data_source(f"local.{local_name}")
def test_make_local_spatial_3(self):
    """Open a remote aerosol subset; reproduces Cate issue #904."""
    store = EsaCciOdpDataStore()
    collection_id = 'esacci.AEROSOL.5-days.L3C.AEX.GOMOS.Envisat.AERGOM.2-19.r1'
    source = store.query(collection_id)[0]
    remote_ds = source.open_dataset(
        time_range=['2002-04-01', '2002-04-06'],
        var_names=['AEX550_uncertainty', 'ANG400-800-AEX'],
        region='-113.9, 40.0,-113.8, 40.1')
    self.assertIsNotNone(remote_ds)
def test_for_duplicates_in_drs_ids(self):
    """Ensure that no two data sources in the store share the same DRS id.

    Collects every dataset id from the store and verifies uniqueness by
    comparing the list length against the length of its set (a set
    collapses duplicates, so equal lengths mean all ids are unique).
    """
    data_store = EsaCciOdpDataStore()
    data_sets = data_store.query()
    # Comprehension replaces the original manual append loop.
    ids = [dataset.id for dataset in data_sets]
    # Direct boolean replaces the original verbose if/else assignment.
    contains_duplicates = len(ids) != len(set(ids))
    self.assertFalse(contains_duplicates)
class EsaCciOdpDataStoreIndexCacheTest(unittest.TestCase):
    """Tests for the ODP data store's index cache behavior."""

    def test_index_cache(self):
        """Force the cache to expire almost immediately and refresh every source."""
        self.data_store = EsaCciOdpDataStore(
            index_cache_used=True, index_cache_expiration_days=1.0e-6)
        sources = self.data_store.query()
        self.assertIsNotNone(sources)
        for source in sources:
            source.update_file_list()
def test_make_local_spatial_2(self):
    """Open and locally materialize an OSTIA SST subset.

    Reproduces Cate issues #823, #822, #818, #816, #783, #892, #900.
    """
    remote_store = EsaCciOdpDataStore()
    local_store = DATA_STORE_REGISTRY.get_data_store('local')
    collection_id = 'esacci.SST.day.L4.SSTdepth.multi-sensor.multi-platform.OSTIA.1-1.r1'
    source = remote_store.query(collection_id)[0]
    time_range = ['1991-09-01', '1991-09-03']
    region = '-2.8, 70.6,-2.7, 70.7'
    remote_ds = source.open_dataset(
        time_range=time_range,
        var_names=['sea_ice_fraction', 'analysed_sst'],
        region=region)
    self.assertIsNotNone(remote_ds)
    # Randomized suffix keeps repeated runs from colliding on the same local name.
    local_name = f"test{random.choice(string.ascii_lowercase)}"
    local_ds = source.make_local(local_name, time_range=time_range, region=region)
    self.assertIsNotNone(local_ds)
    local_store.remove_data_source(f"local.{local_name}")
def _create_test_data_store():
    """Build a 'test-odp' store from the bundled index cache and register it."""
    cache_path = os.path.join(os.path.dirname(__file__), 'esgf-index-cache.json')
    with open(cache_path) as fp:
        json_dict = json.load(fp)
    # Supplying an initial json_dict keeps the store from fetching the index remotely.
    data_store = EsaCciOdpDataStore('test-odp', index_cache_json_dict=json_dict)
    DATA_STORE_REGISTRY.add_data_store(data_store)
    return data_store
def test_make_local_spatial_1(self):
    """Open and locally materialize an AVHRR SST subset.

    Reproduces Cate issues #823, #822, #818, #816, #783, #892, #900.
    """
    remote_store = EsaCciOdpDataStore()
    local_store = DATA_STORE_REGISTRY.get_data_store('local')
    collection_id = ('esacci.SST.satellite-orbit-frequency.L3U.SSTskin.'
                     'AVHRR-3.Metop-A.AVHRRMTA_G.2-1.r1')
    source = remote_store.query(collection_id)[0]
    time_range = ['2006-11-21', '2006-11-23']
    region = '-49.8, 13.1,-49.7, 13.2'
    remote_ds = source.open_dataset(
        time_range=time_range,
        var_names=['sst_dtime', 'sea_surface_temperature_depth'],
        region=region)
    self.assertIsNotNone(remote_ds)
    # Randomized suffix keeps repeated runs from colliding on the same local name.
    local_name = f"test{random.choice(string.ascii_lowercase)}"
    local_ds = source.make_local(local_name, time_range=time_range, region=region)
    self.assertIsNotNone(local_ds)
    local_store.remove_data_source(f"local.{local_name}")
def _create_test_data_store():
    """Build and return a 'test-odp' store from the bundled ESGF index cache."""
    cache_path = os.path.join(
        os.path.dirname(__file__), '..', 'ds', 'esgf-index-cache.json')
    with open(cache_path) as fp:
        json_dict = json.load(fp)
    # Supplying an initial json_dict keeps the store from fetching the index remotely.
    return EsaCciOdpDataStore('test-odp',
                              index_cache_json_dict=json_dict,
                              index_cache_update_tag='test2')
def _create_test_data_store():
    """Build and return a 'test-odp' store from bundled data list and metadata."""
    resources_dir = os.path.join(os.path.dirname(__file__), '..', 'ds', 'resources')
    with open(os.path.join(resources_dir, 'os-data-list.json')) as fp:
        json_dict = json.load(fp)
    metadata_path = os.path.join(resources_dir, 'datasources', 'metadata')
    # The initial json_dict plus a metadata dir keeps the store fully offline.
    return EsaCciOdpDataStore('test-odp',
                              index_cache_json_dict=json_dict,
                              index_cache_update_tag='test2',
                              meta_data_store_path=metadata_path)
def _create_test_data_store():
    """Build a fully offline 'test-odp' store from bundled resources and register it.

    Reads the data list, the DRS id list, and the metadata directory from the
    test resources, resets any pending updates on already-registered stores,
    then constructs and registers the store.

    :return: the registered ``EsaCciOdpDataStore`` instance.
    """
    base_dir = os.path.dirname(__file__)
    with open(os.path.join(base_dir, 'resources/os-data-list.json')) as fp:
        json_dict = json.load(fp)
    with open(os.path.join(base_dir, 'resources/drs_ids.txt')) as fp:
        # splitlines() avoids the spurious trailing empty id that the original
        # fp.read().split('\n') produced when the file ends with a newline.
        drs_ids = fp.read().splitlines()
    for d in DATA_STORE_REGISTRY.get_data_stores():
        d.get_updates(reset=True)
    metadata_path = os.path.join(base_dir, 'resources/datasources/metadata')
    # The initial json_dict plus a metadata dir keeps the store from fetching remotely.
    data_store = EsaCciOdpDataStore('test-odp',
                                    index_cache_json_dict=json_dict,
                                    index_cache_update_tag='test1',
                                    meta_data_store_path=metadata_path,
                                    drs_ids=drs_ids)
    DATA_STORE_REGISTRY.add_data_store(data_store)
    return data_store
def test_unsupported_operand_type_fix(self):
    """Open a permafrost dataset over a yearly time range."""
    store = EsaCciOdpDataStore()
    collection_id = 'esacci.PERMAFROST.yr.L4.ALT.multi-sensor.multi-platform.MODIS.01-0.r1'
    source = store.query(collection_id)[0]
    ds = source.open_dataset(time_range=['2010-01-01', '2011-01-30'],
                             var_names=['ALT'])
    self.assertIsNotNone(ds)
def test_query_web_access(self):
    """An unfiltered query against the live store returns a result object."""
    data_store = EsaCciOdpDataStore()
    sources = data_store.query()
    self.assertIsNotNone(sources)
def test_unconverted_time(self):
    """Open an ocean-colour chlorophyll dataset over a monthly time range."""
    store = EsaCciOdpDataStore()
    collection_id = 'esacci.OC.5-days.L3S.CHLOR_A.multi-sensor.multi-platform.MERGED.4-2.sinusoidal'
    source = store.query(collection_id)[0]
    ds = source.open_dataset(time_range=['2010-01-01', '2010-01-30'],
                             var_names=['CHLOR_A'])
    self.assertIsNotNone(ds)
def test_normalization_of_time(self):
    """Open an ice-sheets dataset over a single-day time range."""
    store = EsaCciOdpDataStore()
    collection_id = ('esacci.ICESHEETS.yr.Unspecified.GMB.GRACE-instrument.'
                     'GRACE.UNSPECIFIED.1-2.greenland_gmb_mass_trends')
    source = store.query(collection_id)[0]
    ds = source.open_dataset(time_range=['2005-07-02', '2005-07-02'],
                             var_names=['GMB_trend'])
    self.assertIsNotNone(ds)