def _get_test_dataset_three_bands_two_prereq(self, bands=3):
    """Helper function to create a single test dataset.

    Builds an RGB ``xarray.DataArray`` backed by a dask zeros array on a
    stereographic dummy area, with two reflectance prerequisites recorded
    in its attributes.

    NOTE(review): the band coordinate is hard-coded to ['R', 'G', 'B'],
    so ``bands`` effectively must stay 3 for the coords to match the
    data shape — confirm before calling with another value.
    """
    import xarray as xr
    import dask.array as da
    from datetime import datetime
    from pyresample.geometry import AreaDefinition
    from pyresample.utils import proj4_str_to_dict
    from satpy import DatasetID
    # Stereographic dummy area used only as the dataset's 'area' attribute.
    area_def = AreaDefinition(
        'test',
        'test',
        'test',
        proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 '
                          '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'),
        100,
        200,
        (-1000., -1500., 1000., 1500.),
    )
    ds1 = xr.DataArray(
        da.zeros((bands, 100, 200), chunks=50),
        coords=[['R', 'G', 'B'], list(range(100)), list(range(200))],
        dims=('bands', 'y', 'x'),
        attrs={'name': 'test',
               'start_time': datetime.utcnow(),
               'platform_name': "TEST_PLATFORM_NAME",
               'sensor': 'TEST_SENSOR_NAME',
               'area': area_def,
               # Two prerequisite channels, both requested as reflectance.
               'prerequisites': [DatasetID(name='1', calibration='reflectance'),
                                 DatasetID(name='2', calibration='reflectance')]}
    )
    return ds1
def test_excs(self, reader_configs, caplog):
    """Test that exceptions are raised where expected.

    Invalid dataset names raise ValueError; an unknown calibration is
    reported through the log instead of raising.
    """
    from satpy import DatasetID
    from satpy.readers import load_reader

    filenames = [
        "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
        "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_"
        "20170410113925_20170410113934_N__C_0070_0067.nc",
    ]
    reader = load_reader(reader_configs)
    loadables = reader.select_files_from_pathnames(filenames)
    fhs = reader.create_filehandlers(loadables)

    # Unknown dataset names must be rejected outright.
    with pytest.raises(ValueError):
        fhs["fci_l1c_fdhsi"][0].get_dataset(DatasetID(name="invalid"), {})
    with pytest.raises(ValueError):
        fhs["fci_l1c_fdhsi"][0]._get_dataset_quality(
                DatasetID(name="invalid"), {})
    # An unknown calibration key is logged as an error, not raised.
    with caplog.at_level(logging.ERROR):
        fhs["fci_l1c_fdhsi"][0].get_dataset(
                DatasetID(name="ir_123", calibration="unknown"),
                {"units": "unknown"})
        assert "unknown calibration key" in caplog.text
def test_load_all_i_radiances(self):
    """Load all I band radiances."""
    from satpy.readers import load_reader
    from satpy import DatasetID
    r = load_reader(self.reader_configs)
    # Five SVI band files plus the GITCO geolocation file they share.
    loadables = r.select_files_from_pathnames([
        'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
        'SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
        'SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
        'SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
        'SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
        'GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
    ])
    r.create_filehandlers(loadables)
    # Explicitly request radiance calibration for every I band.
    ds = r.load([
        DatasetID(name='I01', calibration='radiance', modifiers=None),
        DatasetID(name='I02', calibration='radiance', modifiers=None),
        DatasetID(name='I03', calibration='radiance', modifiers=None),
        DatasetID(name='I04', calibration='radiance', modifiers=None),
        DatasetID(name='I05', calibration='radiance', modifiers=None),
    ])
    self.assertEqual(len(ds), 5)
    for d in ds.values():
        # Every loaded band must be radiance-calibrated with the expected
        # units and carry an area definition.
        self.assertEqual(d.attrs['calibration'], 'radiance')
        self.assertEqual(d.attrs['units'], 'W m-2 um-1 sr-1')
        self.assertEqual(d.attrs['rows_per_scan'], 32)
        self.assertIn('area', d.attrs)
        self.assertIsNotNone(d.attrs['area'])
def test_load_longitude_latitude(self):
    """Test that longitude and latitude datasets are loaded correctly."""
    from satpy import DatasetID

    def test_func(dname, x, y):
        # For 'longitude' assert x < y element-wise; for anything else
        # ('latitude' here) assert x > y.
        if dname == 'longitude':
            # assert less
            np.testing.assert_array_less(x, y)
        else:
            # assert greater
            # np.testing.assert_equal(x > y, True)
            np.testing.assert_array_less(y, x)

    scene = Scene(reader='modis_l2', filenames=[self.file_name])
    for dataset_name in ['longitude', 'latitude']:
        # Default resolution should be the interpolated 1km
        scene.load([dataset_name])
        longitude_1km_id = DatasetID(name=dataset_name, resolution=1000)
        longitude_1km = scene[longitude_1km_id]
        self.assertEqual(longitude_1km.shape, (5*SCAN_WIDTH, 5*SCAN_LEN+4))
        test_func(dataset_name, longitude_1km.values, 0)
        # Specify original 5km scale
        scene.load([dataset_name], resolution=5000)
        longitude_5km_id = DatasetID(name=dataset_name, resolution=5000)
        longitude_5km = scene[longitude_5km_id]
        self.assertEqual(longitude_5km.shape,
                         TEST_DATA[dataset_name.capitalize()]['data'].shape)
        test_func(dataset_name, longitude_5km.values, 0)
def test_fy3d_1km_resolutions(self):
    """Test loading data when only 1km resolutions are available.

    Verifies that no 250m datasets are offered, that the expected number
    of 1km dataset variants exist per band, and that loading yields the
    right shape, calibration and units for each band.
    """
    from satpy import DatasetID
    from satpy.readers import load_reader, get_key
    filenames = [
        'tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF',
        'tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF',
    ]
    reader = load_reader(self.reader_configs)
    files = reader.select_files_from_pathnames(filenames)
    # BUG FIX: the original `self.assertTrue(4, len(files))` could never
    # fail (the second argument is the failure message) and "4" did not
    # even match the two input files; assert the real count instead.
    self.assertEqual(len(filenames), len(files))
    reader.create_filehandlers(files)
    # Make sure we have some files
    self.assertTrue(reader.file_handlers)

    # Verify that we have multiple resolutions for:
    #     - Bands 1-4 (visible)
    #     - Bands 24-25 (IR)
    available_datasets = reader.available_dataset_ids
    for band_name in ('1', '2', '3', '4', '24', '25'):
        if band_name in ('24', '25'):
            # don't know how to get radiance for IR bands
            num_results = 2
        else:
            num_results = 3
        # No 250m variants should be available at all...
        ds_id = DatasetID(name=band_name, resolution=250)
        res = get_key(ds_id, available_datasets,
                      num_results=num_results, best=False)
        self.assertEqual(0, len(res))
        # ...but the full set of 1km variants should be.
        ds_id = DatasetID(name=band_name, resolution=1000)
        res = get_key(ds_id, available_datasets,
                      num_results=num_results, best=False)
        self.assertEqual(num_results, len(res))

    res = reader.load(['1', '2', '3', '4', '5', '20', '24', '25'])
    self.assertEqual(8, len(res))
    # Expected (calibration, units) per band: visible bands are
    # reflectance in %, IR bands brightness temperature in K.
    expected = {
        '1': ('reflectance', '%'),
        '2': ('reflectance', '%'),
        '3': ('reflectance', '%'),
        '4': ('reflectance', '%'),
        '5': ('reflectance', '%'),
        '20': ('brightness_temperature', 'K'),
        '24': ('brightness_temperature', 'K'),
        '25': ('brightness_temperature', 'K'),
    }
    for band_name, (calibration, units) in expected.items():
        self.assertEqual((2 * 10, 2048), res[band_name].shape)
        self.assertEqual(calibration, res[band_name].attrs['calibration'])
        self.assertEqual(units, res[band_name].attrs['units'])
def test_properties(self):
    """Test basic properties/attributes of the MultiScene."""
    from satpy import MultiScene, DatasetID

    test_area = _create_test_area()
    scenes = _create_test_scenes(area=test_area)
    # Identifiers referenced by the assertions below.
    ds1_id, ds2_id, ds3_id, ds4_id = (
        DatasetID(name=n) for n in ('ds1', 'ds2', 'ds3', 'ds4'))

    # Add a dataset to only one of the Scenes
    scenes[1]['ds3'] = _create_test_dataset('ds3')
    mscn = MultiScene(scenes)
    # 'ds3' exists in a single scene, so it is loaded but not shared.
    self.assertSetEqual(mscn.loaded_dataset_ids, {ds1_id, ds2_id, ds3_id})
    self.assertSetEqual(mscn.shared_dataset_ids, {ds1_id, ds2_id})
    self.assertTrue(mscn.all_same_area)

    # A dataset on a larger area breaks the all-same-area property.
    bigger_area = _create_test_area(shape=(20, 40))
    scenes[0]['ds4'] = _create_test_dataset('ds4', shape=(20, 40),
                                            area=bigger_area)
    self.assertSetEqual(mscn.loaded_dataset_ids,
                        {ds1_id, ds2_id, ds3_id, ds4_id})
    self.assertSetEqual(mscn.shared_dataset_ids, {ds1_id, ds2_id})
    self.assertFalse(mscn.all_same_area)
def test_load_all_lcc(self, pg):
    """Test loading all test datasets with lcc projections"""
    # NOTE(review): the values stored in ``lons`` (12..57) look like
    # latitudes and those in ``lats`` (-133..-49) like longitudes, and
    # they are handed to FakeGRIB as latlons=(lats, lons) — confirm the
    # intended ordering against the FakeGRIB helper.
    lons = np.array([
        [12.19, 0, 0, 0, 14.34208538],
        [0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0],
        [54.56534318, 0, 0, 0, 57.32843565]])
    lats = np.array([
        [-133.459, 0, 0, 0, -65.12555139],
        [0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0],
        [-152.8786225, 0, 0, 0, -49.41598659]])
    # Fake GRIB content on a Lambert conformal conic (lcc) projection.
    pg.open.return_value = FakeGRIB(
        proj_params={
            'a': 6371229, 'b': 6371229, 'proj': 'lcc',
            'lon_0': 265.0, 'lat_0': 25.0,
            'lat_1': 25.0, 'lat_2': 25.0},
        latlons=(lats, lons))
    from satpy.readers import load_reader
    from satpy import DatasetID
    r = load_reader(self.reader_configs)
    loadables = r.select_files_from_pathnames([
        'gfs.t18z.sfluxgrbf106.grib2',
    ])
    r.create_filehandlers(loadables)
    # Temperature at three different levels.
    datasets = r.load([
        DatasetID(name='t', level=100),
        DatasetID(name='t', level=200),
        DatasetID(name='t', level=300)])
    self.assertEqual(len(datasets), 3)
    for v in datasets.values():
        self.assertEqual(v.attrs['units'], 'K')
        self.assertIsInstance(v, xr.DataArray)
def test_instantiate(self, mocked_dataset):
    """Test initialization of file handlers.

    Instantiates each OLCI NC handler class, requests a dataset from it
    and checks that the mocked dataset opener was used.  The mock is
    reset after each handler so calls do not accumulate across cases.
    """
    from satpy.readers.olci_nc import (NCOLCIBase, NCOLCICal, NCOLCIGeo,
                                       NCOLCIChannelBase, NCOLCI1B, NCOLCI2)
    from satpy import DatasetID
    import xarray as xr

    # Minimal calibration data consumed by the NCOLCI1B handler below.
    cal_data = xr.Dataset(
        {
            'solar_flux': (('bands'), [0, 1, 2]),
            'detector_index': (('bands'), [0, 1, 2]),
        },
        {
            'bands': [0, 1, 2],
        },
    )

    ds_id = DatasetID(name='Oa01', calibration='reflectance')
    ds_id2 = DatasetID(name='wsqf', calibration='reflectance')
    filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01',
                     'start_time': 0, 'end_time': 0}

    test = NCOLCIBase('somedir/somefile.nc', filename_info, 'c')
    test.get_dataset(ds_id, filename_info)
    mocked_dataset.assert_called()
    mocked_dataset.reset_mock()

    test = NCOLCICal('somedir/somefile.nc', filename_info, 'c')
    test.get_dataset(ds_id, filename_info)
    mocked_dataset.assert_called()
    mocked_dataset.reset_mock()

    test = NCOLCIGeo('somedir/somefile.nc', filename_info, 'c')
    test.get_dataset(ds_id, filename_info)
    mocked_dataset.assert_called()
    mocked_dataset.reset_mock()

    test = NCOLCIChannelBase('somedir/somefile.nc', filename_info, 'c')
    test.get_dataset(ds_id, filename_info)
    mocked_dataset.assert_called()
    mocked_dataset.reset_mock()

    # NCOLCI1B additionally takes a calibration file handler.
    cal = mock.Mock()
    cal.nc = cal_data
    test = NCOLCI1B('somedir/somefile.nc', filename_info, 'c', cal)
    test.get_dataset(ds_id, filename_info)
    mocked_dataset.assert_called()
    mocked_dataset.reset_mock()

    # NCOLCI2 resolves the variable via the 'nc_key' entry of the info
    # dict; exercised with two different dataset IDs.
    test = NCOLCI2('somedir/somefile.nc', filename_info, 'c')
    test.get_dataset(ds_id, {'nc_key': 'the_key'})
    test.get_dataset(ds_id2, {'nc_key': 'the_key'})
    mocked_dataset.assert_called()
    mocked_dataset.reset_mock()
def test_fy4a_all_resolutions(self):
    """Test loading data when all resolutions are available.

    Checks the dataset variants offered per band at 500m/1km/2km and
    then loads all 14 bands, verifying shape, calibration and units.
    """
    from satpy import DatasetID
    from satpy.readers import load_reader, get_key
    filenames = [
        'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_0500M_V0001.HDF',
        'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_1000M_V0001.HDF',
        'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_2000M_V0001.HDF',
        'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_4000M_V0001.HDF',
    ]
    reader = load_reader(self.reader_configs)
    files = reader.select_files_from_pathnames(filenames)
    # BUG FIX: the original `self.assertTrue(4, len(files))` could never
    # fail because the second argument is treated as the failure message.
    self.assertEqual(4, len(files))
    reader.create_filehandlers(files)
    # Make sure we have some files
    self.assertTrue(reader.file_handlers)

    available_datasets = reader.available_dataset_ids
    # 500m: only band C02 is offered at this resolution.
    # (np.linspace over integer band numbers replaced with range.)
    for band_name in ('C02',):
        ds_id = DatasetID(name=band_name, resolution=500)
        res = get_key(ds_id, available_datasets, num_results=0, best=False)
        self.assertEqual(2, len(res))
    # 1km: bands C01-C03.
    for band_name in ('C%02d' % ch for ch in range(1, 4)):
        ds_id = DatasetID(name=band_name, resolution=1000)
        res = get_key(ds_id, available_datasets, num_results=0, best=False)
        self.assertEqual(2, len(res))
    # 2km: bands C01-C07; C07 has one extra dataset variant.
    for band_name in ('C%02d' % ch for ch in range(1, 8)):
        ds_id = DatasetID(name=band_name, resolution=2000)
        res = get_key(ds_id, available_datasets, num_results=0, best=False)
        if band_name < 'C07':
            self.assertEqual(2, len(res))
        else:
            self.assertEqual(3, len(res))

    band_names = ['C%02d' % ch for ch in range(1, 15)]
    res = reader.load(band_names)
    self.assertEqual(14, len(res))
    for band_name in band_names:
        self.assertEqual((2, 5), res[band_name].shape)
        # Bands below C07 are visible (reflectance in %); the rest are
        # IR (brightness temperature in K).
        if band_name < 'C07':
            self.assertEqual('reflectance',
                             res[band_name].attrs['calibration'])
            self.assertEqual('%', res[band_name].attrs['units'])
        else:
            self.assertEqual('brightness_temperature',
                             res[band_name].attrs['calibration'])
            self.assertEqual('K', res[band_name].attrs['units'])
def test_fy4a_1km_resolutions(self):
    """Test loading data when only 1km resolutions are available.

    Verifies that bands C01-C03 are offered at 1km only, then loads them
    and checks shape, calibration, units and the interpolated values.
    """
    from satpy import DatasetID
    from satpy.readers import load_reader, get_key
    filenames = [
        'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_1000M_V0001.HDF',
    ]
    reader = load_reader(self.reader_configs)
    files = reader.select_files_from_pathnames(filenames)
    # BUG FIX: the original `self.assertTrue(1, len(files))` could never
    # fail because the second argument is treated as the failure message.
    self.assertEqual(1, len(files))
    reader.create_filehandlers(files)
    # Make sure we have some files
    self.assertTrue(reader.file_handlers)

    # Verify that the resolution is only 1km
    available_datasets = reader.available_dataset_ids
    band_names = ['C%02d' % ch for ch in range(1, 4)]
    for band_name in band_names:
        # Two variants at 1km, none at any other resolution.
        for resolution, num_expected in ((500, 0), (1000, 2),
                                         (2000, 0), (4000, 0)):
            ds_id = DatasetID(name=band_name, resolution=resolution)
            res = get_key(ds_id, available_datasets,
                          num_results=0, best=False)
            self.assertEqual(num_expected, len(res))

    res = reader.load(band_names)
    self.assertEqual(3, len(res))
    expected = {
        1: np.array([[2.01, 2.02, 2.03, 2.04, 2.05],
                     [2.06, 2.07, 2.08, 2.09, 2.1]]),
        2: np.array([[4.03, 4.06, 4.09, 4.12, 4.15],
                     [4.18, 4.21, 4.24, 4.27, 4.3]]),
        3: np.array([[6.05, 6.1, 6.15, 6.2, 6.25],
                     [6.3, 6.35, 6.4, 6.45, 6.5]])
    }
    for index, band_name in enumerate(band_names):
        # BUG FIX: the original asserted assertEqual(1, ...islower()),
        # comparing a bool to 1; assert the boolean directly.
        self.assertTrue(res[band_name].attrs['sensor'].islower())
        self.assertEqual((2, 5), res[band_name].shape)
        self.assertEqual('reflectance',
                         res[band_name].attrs['calibration'])
        self.assertEqual('%', res[band_name].attrs['units'])
        self.assertTrue(np.allclose(res[band_name].values,
                                    expected[index + 1], equal_nan=True))
def test_read_geo(self):
    """Test read_geo() function"""
    import h5py
    from satpy.readers.iasi_l2 import read_geo
    from satpy import DatasetID
    with h5py.File(self.fname, 'r') as fid:
        # Both geolocation datasets must come back with the scan grid
        # shape after the dask graph is computed.
        for dataset_name in ('sensing_time', 'latitude'):
            data = read_geo(fid, DatasetID(name=dataset_name)).compute()
            self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH))
def test_get_dataset_coords(self):
    """Test whether coordinates returned by get_dataset() are correct.

    Loads longitude and latitude through the reader and compares them
    element-wise against the reference arrays stored on the test case.
    """
    lon = self.reader.get_dataset(key=DatasetID(name='longitude',
                                                calibration=None),
                                  info={})
    lat = self.reader.get_dataset(key=DatasetID(name='latitude',
                                                calibration=None),
                                  info={})
    # ... this only compares the valid (unmasked) elements
    self.assertTrue(np.all(lat.to_masked_array() == self.lat),
                    msg='get_dataset() returns invalid latitude')
    self.assertTrue(np.all(lon.to_masked_array() == self.lon),
                    msg='get_dataset() returns invalid longitude')
def test_load_longitude_latitude(self):
    """Test that longitude and latitude datasets are loaded correctly.

    Checks both the default (interpolated 1km) and the original 5km
    resolutions of the geolocation datasets.
    """
    from satpy import DatasetID
    scene = Scene(reader='modis_l2', filenames=[self.file_name])
    for dataset_name in ['longitude', 'latitude']:
        # Default resolution should be the interpolated 1km
        scene.load([dataset_name])
        longitude_1km_id = DatasetID(name=dataset_name, resolution=1000)
        longitude_1km = scene[longitude_1km_id]
        self.assertEqual(longitude_1km.shape,
                         (5*SCAN_WIDTH, 5*SCAN_LEN+4))
        # Specify original 5km scale
        # BUG FIX: Scene.load() returns None — the original assigned its
        # result to ``longitude_5km`` only to overwrite it on the next
        # line; call it for its side effect instead.
        scene.load([dataset_name], resolution=5000)
        longitude_5km_id = DatasetID(name=dataset_name, resolution=5000)
        longitude_5km = scene[longitude_5km_id]
        self.assertEqual(longitude_5km.shape,
                         TEST_DATA[dataset_name.capitalize()]['data'].shape)
def test_get_dataset(self):
    """Test get_dataset() for different datasets"""
    from satpy import DatasetID
    info = {'eggs': 'spam'}

    # Pressure: values checked by the helper; the info dict must be
    # merged into the returned attributes.
    data = self.reader.get_dataset(DatasetID(name='pressure'),
                                   info).compute()
    self.check_pressure(data)
    self.assertIn('eggs', data.attrs)
    self.assertEqual('spam', data.attrs['eggs'])

    # Emissivity: values checked by the helper.
    data = self.reader.get_dataset(DatasetID(name='emissivity'),
                                   info).compute()
    self.check_emissivity(data)

    # Sensing time: only the scan-grid shape is verified.
    data = self.reader.get_dataset(DatasetID(name='sensing_time'),
                                   info).compute()
    self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH))
def test_load_reflectance(self, reader_configs):
    """Test loading with reflectance."""
    from satpy import DatasetID
    from satpy.readers import load_reader
    filenames = [
        "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
        "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_"
        "20170410113925_20170410113934_N__C_0070_0067.nc",
    ]
    reader = load_reader(reader_configs)
    loadables = reader.select_files_from_pathnames(filenames)
    reader.create_filehandlers(loadables)
    # Request reflectance calibration for every solar channel.
    res = reader.load([
        DatasetID(name=name, calibration="reflectance") for name in
        self._chans["solar"]])
    assert 8 == len(res)
    for ch in self._chans["solar"]:
        assert res[ch].shape == (200, 11136)
        assert res[ch].dtype == np.float64
        assert res[ch].attrs["calibration"] == "reflectance"
        assert res[ch].attrs["units"] == "%"
        # Expected value presumably follows from the fake file's constant
        # radiance (15) and irradiance (50): 100 * rad * pi / irr —
        # confirm against the test fixture.
        numpy.testing.assert_array_equal(res[ch], 100 * 15 * 1 * np.pi / 50)
def test_load_counts(self, reader_configs):
    """Test loading with counts."""
    from satpy import DatasetID
    from satpy.readers import load_reader

    # testing two filenames to test correctly combined
    filenames = [
        "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
        "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_"
        "20170410113925_20170410113934_N__C_0070_0067.nc",
        "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
        "CHK-BODY--L2P-NC4E_C_EUMT_20170410114442_GTT_DEV_"
        "20170410113934_20170410113942_N__C_0070_0068.nc",
    ]
    reader = load_reader(reader_configs)
    loadables = reader.select_files_from_pathnames(filenames)
    reader.create_filehandlers(loadables)
    res = reader.load([
        DatasetID(name=name, calibration="counts") for name in
        self._chans["solar"] + self._chans["terran"]])
    # 8 solar + 8 terrestrial channels.
    assert 16 == len(res)
    for ch in self._chans["solar"] + self._chans["terran"]:
        # Two input files combined -> twice the rows of a single file.
        assert res[ch].shape == (200 * 2, 11136)
        assert res[ch].dtype == np.uint16
        assert res[ch].attrs["calibration"] == "counts"
        assert res[ch].attrs["units"] == "1"
        if ch == 'ir_38':
            # ir_38 differs between the two segments: last row (~0 == -1)
            # holds 1, first row holds 5000.
            numpy.testing.assert_array_equal(res[ch][~0], 1)
            numpy.testing.assert_array_equal(res[ch][0], 5000)
        else:
            numpy.testing.assert_array_equal(res[ch], 1)
def test_get_dataset_counts(self):
    """Test whether counts returned by get_dataset() are correct.

    Also verifies the metadata attached to every channel.
    """
    from satpy.readers.goes_imager_nc import ALTITUDE, UNKNOWN_SECTOR

    self.reader.meta.update({'lon0': -75.0,
                             'lat0': 0.0,
                             'sector': UNKNOWN_SECTOR,
                             'nadir_row': 1,
                             'nadir_col': 2,
                             'area_def_uni': 'some_area'})
    # Attributes the reader is expected to attach to every channel.
    attrs_exp = {'orbital_parameters': {'projection_longitude': -75.0,
                                        'projection_latitude': 0.0,
                                        'projection_altitude': ALTITUDE,
                                        'yaw_flip': True},
                 'satellite_longitude': -75.0,
                 'satellite_latitude': 0.0,
                 'satellite_altitude': ALTITUDE,
                 'platform_name': 'GOES-15',
                 'sensor': 'goes_imager',
                 'sector': UNKNOWN_SECTOR,
                 'nadir_row': 1,
                 'nadir_col': 2,
                 'area_def_uniform_sampling': 'some_area'}

    for ch in self.channels:
        counts = self.reader.get_dataset(
            key=DatasetID(name=ch, calibration='counts'), info={})
        # ... this only compares the valid (unmasked) elements
        # NOTE(review): the reference counts are divided by 32 before
        # comparison — presumably the file stores bit-shifted counts;
        # confirm against the reader implementation.
        self.assertTrue(np.all(self.counts/32. == counts.to_masked_array()),
                        msg='get_dataset() returns invalid counts for '
                            'channel {}'.format(ch))
        # Check attributes
        self.assertDictEqual(counts.attrs, attrs_exp)
def _init_summary_page(self):
    """Populate the product summary text from the checked ID-table rows."""
    # we are going to use the id table to get our summary
    # so make sure the values are correct
    if self.ui.selectByTabWidget.currentIndex() != BY_ID_TAB:
        self.ui.selectByTabWidget.setCurrentIndex(BY_ID_TAB)
    selected_text = []
    selected_ids = []
    # Fixed-width table rows: 20-char name column, 8-char level column.
    id_format = "| {name:<20s} | {level:>8s} |"
    header_format = "| {name:<20s} | {level:>8s} |"
    header_line = "|-{0:-^20s}-|-{0:-^8s}-|".format('-')
    for item_idx in range(self.ui.selectIDTable.rowCount()):
        name_item = self.ui.selectIDTable.item(item_idx, 0)
        level_item = self.ui.selectIDTable.item(item_idx, 1)
        # Only rows whose checkbox is ticked are included in the summary.
        if name_item.checkState():
            # The raw identifier values live in the UserRole data; the
            # visible text is used only for display.
            name = name_item.data(QtCore.Qt.UserRole)
            level = level_item.data(QtCore.Qt.UserRole)
            selected_ids.append(DatasetID(name=name, level=level))
            selected_text.append(id_format.format(
                name=name_item.text(),
                level=level_item.text(),
            ))
    self.selected_ids = selected_ids
    summary_text = """Products to be loaded: {} """.format(len(selected_ids))
    header = header_format.format(name="Name", level="Level")
    summary_text += "\n".join([header, header_line] + selected_text)
    self.ui.productSummaryText.setText(summary_text)
def test_read_dataset(self):
    """Test read_dataset() function"""
    import h5py
    from satpy.readers.iasi_l2 import read_dataset
    from satpy import DatasetID
    with h5py.File(self.fname, 'r') as fid:
        # Pressure and emissivity values are validated by helpers.
        pressure = read_dataset(fid, DatasetID(name='pressure')).compute()
        self.check_pressure(pressure)
        emissivity = read_dataset(fid, DatasetID(name='emissivity')).compute()
        self.check_emissivity(emissivity)
        # This dataset doesn't have any attributes
        ozone = read_dataset(fid,
                             DatasetID(name='ozone_total_column')).compute()
        self.assertEqual(0, len(ozone.attrs))
def test_get_dataset(self, mocked_dataset):
    """Test reading datasets."""
    from satpy.readers.olci_nc import NCOLCI2
    from satpy import DatasetID
    import numpy as np
    import xarray as xr
    # Bitfield mask: each of the 30 cells holds a distinct power of two.
    bit_values = np.array([1 << shift for shift in range(30)]).reshape(5, 6)
    mocked_dataset.return_value = xr.Dataset(
        {'mask': (['rows', 'columns'], bit_values)},
        coords={'rows': np.arange(5), 'columns': np.arange(6)})
    filename_info = {'mission_id': 'S3A', 'dataset_name': 'mask',
                     'start_time': 0, 'end_time': 0}
    handler = NCOLCI2('somedir/somefile.nc', filename_info, 'c')
    res = handler.get_dataset(DatasetID(name='mask'), {'nc_key': 'mask'})
    # The flag dataset must be decoded into a boolean array.
    self.assertEqual(res.dtype, np.dtype('bool'))
def test_get_area_def(self):
    """Get the area definition."""
    from satpy.readers.nwcsaf_msg2013_hdf5 import Hdf5NWCSAF
    from satpy import DatasetID

    handler = Hdf5NWCSAF(self.filename_ct, {}, {})
    area_def = handler.get_area_def(DatasetID(name="ct"))

    # Extent values are only compared to 4 decimal places.
    expected_extent = AREA_DEF_DICT['area_extent']
    for computed, expected in zip(area_def.area_extent, expected_extent):
        self.assertAlmostEqual(computed, expected, 4)

    proj_dict = AREA_DEF_DICT['proj_dict']
    self.assertEqual(proj_dict['proj'], area_def.proj_dict['proj'])
    # Not all elements passed on Appveyor, so skip testing every single element of the proj-dict:
    # for key in proj_dict:
    #     self.assertEqual(proj_dict[key], area_def.proj_dict[key])

    self.assertEqual(AREA_DEF_DICT['x_size'], area_def.width)
    self.assertEqual(AREA_DEF_DICT['y_size'], area_def.height)
    self.assertEqual(AREA_DEF_DICT['area_id'], area_def.area_id)
def test_load_bt(self, reader_configs, caplog):
    """Test loading with bt."""
    from satpy import DatasetID
    from satpy.readers import load_reader
    filenames = [
        "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
        "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_"
        "20170410113925_20170410113934_N__C_0070_0067.nc",
    ]
    reader = load_reader(reader_configs)
    loadables = reader.select_files_from_pathnames(filenames)
    reader.create_filehandlers(loadables)
    # Loading brightness temperatures must not emit any warnings.
    with caplog.at_level(logging.WARNING):
        res = reader.load([
            DatasetID(name=name, calibration="brightness_temperature") for
            name in self._chans["terran"]])
        assert caplog.text == ""
    for ch in self._chans["terran"]:
        assert res[ch].shape == (200, 11136)
        assert res[ch].dtype == np.float64
        assert res[ch].attrs["calibration"] == "brightness_temperature"
        assert res[ch].attrs["units"] == "K"
        if ch == 'ir_38':
            # ir_38's rows calibrate differently: last row (~0 == -1)
            # vs first row.
            numpy.testing.assert_array_almost_equal(
                    res[ch][~0],
                    209.68274099)
            numpy.testing.assert_array_almost_equal(
                    res[ch][0],
                    1888.851296)
        else:
            numpy.testing.assert_array_almost_equal(res[ch], 209.68274099)
def test_load_bt(self):
    """Test loading with bt """
    from satpy import DatasetID
    from satpy.readers import load_reader
    filenames = [
        "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
        "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_"
        "20170410113925_20170410113934_N__C_0070_0067.nc",
    ]
    reader = load_reader(self.reader_configs)
    loadables = reader.select_files_from_pathnames(filenames)
    reader.create_filehandlers(loadables)
    # Request brightness temperature for every terrestrial channel.
    res = reader.load([
        DatasetID(name=name, calibration="brightness_temperature") for
        name in self._chans["terran"]])
    self.assertEqual(8, len(res))
    for ch in self._chans["terran"]:
        self.assertEqual(res[ch].shape, (200, 11136))
        self.assertEqual(res[ch].dtype, np.float64)
        self.assertEqual(res[ch].attrs["calibration"],
                         "brightness_temperature")
        self.assertEqual(res[ch].attrs["units"], "K")
        # All channels share the same constant expected BT derived from
        # the fake file's radiances.
        numpy.testing.assert_array_almost_equal(res[ch], 181.917084)
def test_get_dataset_invalid(self):
    """Test handling of invalid calibrations"""
    # Each (channel, calibration) pair below is unsupported and must make
    # get_dataset() raise ValueError.
    invalid_combos = (
        ('00_7', 'brightness_temperature'),  # VIS -> BT
        ('10_7', 'reflectance'),             # IR -> Reflectance
        ('10_7', 'invalid'),                 # Unsupported calibration
    )
    for name, calibration in invalid_combos:
        key = DatasetID(name=name, calibration=calibration)
        self.assertRaises(ValueError, self.reader.get_dataset,
                          key=key, info={})
def test_get_dataset(self):
    """Test basic L2 load."""
    from satpy import DatasetID
    key = DatasetID(name='HT')
    res = self.reader.get_dataset(key, {'file_key': 'HT'})

    # Expected values: raw counts scaled by 0.3052037; cell [0, 1] is
    # expected to come back as NaN (presumably the fill value — confirm
    # against the test fixture).
    exp_data = np.array([[2 * 0.3052037, np.nan],
                         [32768 * 0.3052037, 32767 * 0.3052037]])

    # Metadata the reader is expected to attach to the dataset.
    exp_attrs = {'instrument_ID': None,
                 'modifiers': (),
                 'name': 'HT',
                 'orbital_slot': None,
                 'platform_name': 'GOES-16',
                 'platform_shortname': 'G16',
                 'production_site': None,
                 'satellite_altitude': 35786020.,
                 'satellite_latitude': 0.0,
                 'satellite_longitude': -89.5,
                 'scan_mode': 'M3',
                 'scene_id': None,
                 'sensor': 'abi',
                 'timeline_ID': None,
                 'units': 'm'}

    self.assertTrue(np.allclose(res.data, exp_data, equal_nan=True))
    self.assertDictEqual(dict(res.attrs), exp_attrs)
def test_get_dataset(self):
    """Test the get_dataset method.

    Loads the GLM flash_extent_density product and compares the full
    attribute dictionary against the expected metadata.
    """
    from satpy import DatasetID
    key = DatasetID(name='flash_extent_density')
    res = self.reader.get_dataset(key, {'info': 'info'})
    exp = {'instrument_ID': None,
           'modifiers': (),
           'name': 'flash_extent_density',
           'orbital_parameters': {
               'projection_altitude': 1.0,
               'projection_latitude': 0.0,
               'projection_longitude': -90.0,
               # 'satellite_nominal_altitude': 35786.02,
               'satellite_nominal_latitude': 0.0,
               'satellite_nominal_longitude': -89.5},
           'orbital_slot': None,
           'platform_name': 'GOES-16',
           'platform_shortname': 'G16',
           'production_site': None,
           'scan_mode': 'M3',
           'scene_abbr': 'C',
           'scene_id': None,
           'sensor': 'glm',
           'timeline_ID': None,
           'grid_mapping': 'goes_imager_projection',
           'standard_name': 'flash_extent_density',
           'long_name': 'Flash extent density',
           'units': 'Count per nominal 3136 microradian^2 pixel per 1.0 min'}
    self.assertDictEqual(res.attrs, exp)
def test_fy4a_counts_calib(self):
    """Test loading data at counts calibration.

    Loads all 14 bands as raw counts and checks shape, calibration,
    dtype and units for each.
    """
    from satpy import DatasetID
    from satpy.readers import load_reader
    filenames = [
        'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_0500M_V0001.HDF',
        'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_1000M_V0001.HDF',
        'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_2000M_V0001.HDF',
        'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_4000M_V0001.HDF',
    ]
    reader = load_reader(self.reader_configs)
    files = reader.select_files_from_pathnames(filenames)
    # BUG FIX: the original `self.assertTrue(4, len(files))` could never
    # fail because the second argument is treated as the failure message.
    self.assertEqual(4, len(files))
    reader.create_filehandlers(files)
    # Make sure we have some files
    self.assertTrue(reader.file_handlers)

    # Request counts for bands C01-C14 (np.linspace over integers
    # replaced with range for clarity).
    band_names = ['C%02d' % ch for ch in range(1, 15)]
    ds_ids = [DatasetID(name=band_name, calibration='counts')
              for band_name in band_names]
    res = reader.load(ds_ids)
    self.assertEqual(14, len(res))
    for band_name in band_names:
        self.assertEqual((2, 5), res[band_name].shape)
        self.assertEqual('counts', res[band_name].attrs['calibration'])
        self.assertEqual(res[band_name].dtype, np.uint16)
        self.assertEqual('1', res[band_name].attrs['units'])
def test_save_array(self):
    """Test saving a DataArray with the cf writer.

    Round-trips the data values and checks the serialized prerequisite
    string attribute.
    """
    from satpy import Scene
    import xarray as xr
    import tempfile
    scn = Scene()
    start_time = datetime(2018, 5, 30, 10, 0)
    end_time = datetime(2018, 5, 30, 10, 15)
    scn['test-array'] = xr.DataArray([1, 2, 3],
                                     attrs=dict(
                                         start_time=start_time,
                                         end_time=end_time,
                                         prerequisites=[DatasetID('hej')]))
    # BUG FIX: create the temp file *before* entering the try block;
    # previously a failure in mkstemp() would have made the finally
    # clause raise NameError on the undefined ``filename``.
    handle, filename = tempfile.mkstemp()
    os.close(handle)
    try:
        scn.save_datasets(filename=filename, writer='cf')
        import h5netcdf as nc4
        with nc4.File(filename) as f:
            self.assertTrue(all(f['test-array'][:] == [1, 2, 3]))
            # Prerequisites are stored as their string representation.
            expected_prereq = ("DatasetID(name='hej', wavelength=None, "
                               "resolution=None, polarization=None, "
                               "calibration=None, level=None, modifiers=())")
            self.assertEqual(f['test-array'].attrs['prerequisites'][0],
                             np.string_(expected_prereq))
    finally:
        os.remove(filename)
def test_navigation(self):
    """Test reading the lon and lats."""
    with tempfile.TemporaryFile() as tmpfile:
        # Write the AAPP L1B header, then the data records at the fixed
        # byte offset (22016) where the file handler expects them.
        self._header.tofile(tmpfile)
        tmpfile.seek(22016, 0)
        self._data.tofile(tmpfile)
        fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info,
                              self.filetype_info)
        info = {}
        # Both coordinate datasets are expected to be all zeros here
        # (presumably the fixture data is zero-filled — confirm against
        # the test setup).
        key = DatasetID(name='longitude')
        res = fh.get_dataset(key, info)
        assert (np.all(res == 0))
        key = DatasetID(name='latitude')
        res = fh.get_dataset(key, info)
        assert (np.all(res == 0))
def test_get_dataset(self):
    """Test the get_dataset method.

    Loads an ABI radiance dataset, compares the full attribute
    dictionary, and checks that time dimensions have been stripped.
    """
    from satpy import DatasetID
    key = DatasetID(name='Rad', calibration='radiance')
    res = self.reader.get_dataset(key, {'info': 'info'})
    exp = {'calibration': 'radiance',
           'instrument_ID': None,
           'modifiers': (),
           'name': 'Rad',
           'observation_type': 'Rad',
           'orbital_parameters': {'projection_altitude': 1.0,
                                  'projection_latitude': 0.0,
                                  'projection_longitude': -90.0,
                                  'satellite_nominal_altitude': 35786020.,
                                  'satellite_nominal_latitude': 0.0,
                                  'satellite_nominal_longitude': -89.5,
                                  'yaw_flip': True},
           'orbital_slot': None,
           'platform_name': 'GOES-16',
           'platform_shortname': 'G16',
           'production_site': None,
           'scan_mode': 'M3',
           'scene_abbr': 'C',
           'scene_id': None,
           'sensor': 'abi',
           'timeline_ID': None,
           'units': 'W m-2 um-1 sr-1'}
    self.assertDictEqual(res.attrs, exp)
    # we remove any time dimension information
    self.assertNotIn('t', res.coords)
    self.assertNotIn('t', res.dims)
    self.assertNotIn('time', res.coords)
    self.assertNotIn('time', res.dims)