def load_datasets_from_config(extra_opts, *loader_opts):
    """Generic dataset loading function.

    Builds a ``DatasetLoader`` from one or more loader configuration
    dicts, injecting shared options from ``extra_opts`` into the configs
    of data sources that need them.

    :param extra_opts: dict of shared options. For ``'esgf'`` loaders the
        ``'username'``/``'password'`` entries are used (and prompted for
        interactively when ``'password'`` is ``None``); for ``'rcmed'``
        loaders the spatial/temporal bound entries are copied over.
    :param loader_opts: one or more loader configuration dicts, each
        containing a ``'loader_name'`` key identifying its data source.
    :returns: the list of datasets produced by the loader.
    """
    for opt in loader_opts:
        loader_name = opt['loader_name']
        if loader_name == 'esgf':
            # Prompt once for ESGF credentials if they were not supplied;
            # later 'esgf' entries reuse the cached values in extra_opts.
            if extra_opts['password'] is None:
                # raw_input() was Python-2-only; input() is the Python 3
                # equivalent and keeps the same prompt behavior.
                extra_opts['username'] = input('Enter your ESGF OpenID:\n')
                extra_opts['password'] = getpass(
                    prompt='Enter your ESGF password:\n')
            opt['esgf_username'] = extra_opts['username']
            opt['esgf_password'] = extra_opts['password']
        elif loader_name == 'rcmed':
            # RCMED queries need explicit spatial and temporal bounds.
            for key in ('min_lat', 'max_lat', 'min_lon', 'max_lon',
                        'start_time', 'end_time'):
                opt[key] = extra_opts[key]
    loader = DatasetLoader(*loader_opts)
    loader.load_datasets()
    return loader.datasets
def testExistingDataSource(self):
    '''Ensures that existing data source loaders can be added'''
    self.loader = DatasetLoader(self.config)
    self.loader.load_datasets()
    dataset = self.loader.datasets[0]
    self.assertEqual(dataset.origin['source'], 'local')
    np.testing.assert_array_equal(dataset.values, self.values)
def testNewDataSource(self):
    '''Ensures that custom data source loaders can be added'''
    self.loader = DatasetLoader(self.new_data_source_config)
    # The data_source "foo" maps straight to the Dataset constructor.
    self.loader.add_source_loader('foo', build_dataset)
    self.loader.load_datasets()
    dataset = self.loader.datasets[0]
    self.assertEqual(dataset.origin['source'], 'foo')
    np.testing.assert_array_equal(dataset.values, self.values2)
def testMultipleDataSources(self):
    '''Test for when multiple dataset configs are specified'''
    self.loader = DatasetLoader(self.config, self.new_data_source_config)
    # The data_source "foo" maps straight to the Dataset constructor.
    self.loader.add_source_loader('foo', build_dataset)
    self.loader.load_datasets()
    local_ds, foo_ds = self.loader.datasets[0], self.loader.datasets[1]
    self.assertEqual(local_ds.origin['source'], 'local')
    self.assertEqual(foo_ds.origin['source'], 'foo')
    np.testing.assert_array_equal(local_ds.values, self.values)
    np.testing.assert_array_equal(foo_ds.values, self.values2)
class TestDatasetLoader(unittest.TestCase):
    """Tests DatasetLoader against a temporary netCDF fixture file."""

    def setUp(self):
        # Build the temporary netCDF file and cache its variables so the
        # assertions below can compare against known expected arrays.
        self.file_path = create_netcdf_object()
        self.netCDF_file = netCDF4.Dataset(self.file_path, 'r')
        variables = self.netCDF_file.variables
        self.latitudes = variables['latitude'][:]
        self.longitudes = variables['longitude'][:]
        self.times = variables['time'][:]
        self.alt_lats = variables['alt_lat'][:]
        self.alt_lons = variables['alt_lon'][:]
        self.values = variables['value'][:]
        # A second, distinct value array for the custom-loader tests.
        self.values2 = self.values + 1

        # Config for the built-in 'local' loader.
        self.config = {'file_path': self.file_path,
                       'variable_name': 'value'}
        # Config for the custom 'foo' loader registered per-test.
        self.new_data_source_config = {
            'loader_name': 'foo',
            'lats': self.latitudes,
            'lons': self.longitudes,
            'times': self.times,
            'values': self.values2,
            'variable': 'value',
        }

    def tearDown(self):
        os.remove(self.file_path)

    def testNewDataSource(self):
        '''Ensures that custom data source loaders can be added'''
        self.loader = DatasetLoader(self.new_data_source_config)
        # The data_source "foo" maps straight to the Dataset constructor.
        self.loader.add_source_loader('foo', build_dataset)
        self.loader.load_datasets()
        dataset = self.loader.datasets[0]
        self.assertEqual(dataset.origin['source'], 'foo')
        np.testing.assert_array_equal(dataset.values, self.values2)

    def testExistingDataSource(self):
        '''Ensures that existing data source loaders can be added'''
        self.loader = DatasetLoader(self.config)
        self.loader.load_datasets()
        dataset = self.loader.datasets[0]
        self.assertEqual(dataset.origin['source'], 'local')
        np.testing.assert_array_equal(dataset.values, self.values)

    def testMultipleDataSources(self):
        '''Test for when multiple dataset configs are specified'''
        self.loader = DatasetLoader(self.config,
                                    self.new_data_source_config)
        # The data_source "foo" maps straight to the Dataset constructor.
        self.loader.add_source_loader('foo', build_dataset)
        self.loader.load_datasets()
        local_ds = self.loader.datasets[0]
        foo_ds = self.loader.datasets[1]
        self.assertEqual(local_ds.origin['source'], 'local')
        self.assertEqual(foo_ds.origin['source'], 'foo')
        np.testing.assert_array_equal(local_ds.values, self.values)
        np.testing.assert_array_equal(foo_ds.values, self.values2)
# NOTE(review): this class is a duplicate of the TestDatasetLoader defined
# earlier in the file; at import time this later definition shadows the
# earlier one, so only this copy's tests run. Looks like a merge artifact --
# confirm and remove one of the two copies.
class TestDatasetLoader(unittest.TestCase):
    def setUp(self):
        # Create a temporary netCDF file and read back its variables to
        # serve as the expected arrays in the assertions below.
        self.file_path = create_netcdf_object()
        self.netCDF_file = netCDF4.Dataset(self.file_path, 'r')
        self.latitudes = self.netCDF_file.variables['latitude'][:]
        self.longitudes = self.netCDF_file.variables['longitude'][:]
        self.times = self.netCDF_file.variables['time'][:]
        self.alt_lats = self.netCDF_file.variables['alt_lat'][:]
        self.alt_lons = self.netCDF_file.variables['alt_lon'][:]
        self.values = self.netCDF_file.variables['value'][:]
        # Distinct second value array used by the custom-loader tests.
        self.values2 = self.values + 1
        # Set up config: 'local' loader config and a custom 'foo' config.
        self.config = {'file_path': self.file_path,
                       'variable_name': 'value'}
        self.new_data_source_config = {'loader_name': 'foo',
                                       'lats': self.latitudes,
                                       'lons': self.longitudes,
                                       'times': self.times,
                                       'values': self.values2,
                                       'variable': 'value'}

    def tearDown(self):
        # Remove the temporary netCDF file created in setUp.
        os.remove(self.file_path)

    def testNewDataSource(self):
        ''' Ensures that custom data source loaders can be added '''
        self.loader = DatasetLoader(self.new_data_source_config)
        # Here the data_source "foo" represents the Dataset constructor
        self.loader.add_source_loader('foo', build_dataset)
        self.loader.load_datasets()
        self.assertEqual(self.loader.datasets[0].origin['source'],
                         'foo')
        np.testing.assert_array_equal(self.loader.datasets[0].values,
                                      self.values2)

    def testExistingDataSource(self):
        ''' Ensures that existing data source loaders can be added '''
        self.loader = DatasetLoader(self.config)
        self.loader.load_datasets()
        self.assertEqual(self.loader.datasets[0].origin['source'],
                         'local')
        np.testing.assert_array_equal(self.loader.datasets[0].values,
                                      self.values)

    def testMultipleDataSources(self):
        ''' Test for when multiple dataset configs are specified '''
        self.loader = DatasetLoader(self.config,
                                    self.new_data_source_config)
        # Here the data_source "foo" represents the Dataset constructor
        self.loader.add_source_loader('foo', build_dataset)
        self.loader.load_datasets()
        self.assertEqual(self.loader.datasets[0].origin['source'],
                         'local')
        self.assertEqual(self.loader.datasets[1].origin['source'],
                         'foo')
        np.testing.assert_array_equal(self.loader.datasets[0].values,
                                      self.values)
        np.testing.assert_array_equal(self.loader.datasets[1].values,
                                      self.values2)