def test_load_all_lcc(self, pg):
    """Test loading all test datasets with lcc projections."""
    # NOTE(review): the values stored in `lons` (12..57) look like latitudes
    # and the values in `lats` (-153..-49) look like longitudes, i.e. the two
    # names appear swapped. They are passed as latlons=(lats, lons) below —
    # verify against FakeGRIB's expected (lats, lons) ordering.
    lons = np.array([
        [12.19, 0, 0, 0, 14.34208538],
        [0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0],
        [54.56534318, 0, 0, 0, 57.32843565]])
    lats = np.array([
        [-133.459, 0, 0, 0, -65.12555139],
        [0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0],
        [-152.8786225, 0, 0, 0, -49.41598659]])
    # Fake GRIB file advertising a Lambert Conformal Conic projection.
    pg.open.return_value = FakeGRIB(
        proj_params={
            'a': 6371229, 'b': 6371229, 'proj': 'lcc',
            'lon_0': 265.0, 'lat_0': 25.0,
            'lat_1': 25.0, 'lat_2': 25.0},
        latlons=(lats, lons))
    from satpy.readers import load_reader
    r = load_reader(self.reader_configs)
    loadables = r.select_files_from_pathnames([
        'gfs.t18z.sfluxgrbf106.grib2',
    ])
    r.create_filehandlers(loadables)
    # Load the 't' parameter at three pressure levels.
    datasets = r.load([
        DataQuery(name='t', level=100, modifiers=tuple()),
        DataQuery(name='t', level=200, modifiers=tuple()),
        DataQuery(name='t', level=300, modifiers=tuple())])
    self.assertEqual(len(datasets), 3)
    for v in datasets.values():
        self.assertEqual(v.attrs['units'], 'K')
        self.assertIsInstance(v, xr.DataArray)
def _find_compositor(self, dataset_key): """Find the compositor object for the given dataset_key.""" # NOTE: This function can not find a modifier that performs # one or more modifications if it has modifiers see if we can find # the unmodified version first src_node = None if isinstance(dataset_key, DataQuery) and dataset_key.get('modifiers'): new_dict = dataset_key.to_dict() new_dict['modifiers'] = tuple(new_dict['modifiers'][:-1]) new_prereq = DataQuery.from_dict(new_dict) src_node, u = self._find_dependencies(new_prereq) # Update the requested DatasetQuery with information from the src if src_node is not None: dataset_key = self._update_modifier_id(dataset_key, src_node.name) if u: return None, u elif isinstance(dataset_key, str): dataset_key = DataQuery(name=dataset_key) try: compositor = self.get_compositor(dataset_key) except KeyError: raise KeyError("Can't find anything called {}".format( str(dataset_key))) cid = compositor.id root = Node(cid, data=(compositor, [], [])) if src_node is not None: self.add_child(root, src_node) root.data[1].append(src_node) query = cid.create_dep_filter(dataset_key) # 2.1 get the prerequisites LOG.trace( "Looking for composite prerequisites for: {}".format(dataset_key)) prereqs, unknowns = self._get_compositor_prereqs( root, compositor.attrs['prerequisites'], query=query) if unknowns: # Should we remove all of the unknown nodes that were found ? # if there is an unknown prerequisite are we in trouble? return None, unknowns root.data[1].extend(prereqs) # Get the optionals LOG.trace( "Looking for optional prerequisites for: {}".format(dataset_key)) optional_prereqs, _ = self._get_compositor_prereqs( root, compositor.attrs['optional_prerequisites'], skip=True, query=query) root.data[2].extend(optional_prereqs) return root, set()
def _create_dataset_ids(self, keys):
    """Register a DataQuery and info dict for every combination of id values."""
    from itertools import product

    # Only keys that declare an 'id_key' take part in the dataset id.
    ordered_keys = [key for key in keys.keys() if 'id_key' in keys[key]]
    id_keys = [keys[key]['id_key'] for key in ordered_keys]
    value_lists = [keys[key]['values'] for key in ordered_keys]
    for id_vals in product(*value_lists):
        msg_info = dict(zip(ordered_keys, id_vals))
        msg_id = DataQuery(**dict(zip(id_keys, id_vals)))
        ds_info = msg_id.to_dict()
        ds_info.update(msg_info)
        ds_info['file_type'] = self.filetype_info['file_type']
        self._msg_datasets[msg_id] = ds_info
def test_dataquery(self):
    """Test DataQuery objects."""
    from satpy.dataset import DataQuery

    # Construction with a single identifying key must succeed.
    DataQuery(name='cheese_shops')

    # The repr lists the keys that were supplied.
    query = DataQuery(name='VIS008', resolution=111)
    assert repr(query) == "DataQuery(name='VIS008', resolution=111)"

    # Queries with disjoint keys compare unequal.
    assert DataQuery(wavelength=10) != DataQuery(name="VIS006")
def test_missing_attributes(self, proj_params, lon_corners, lat_corners):
    """Check that the grib reader handles missing attributes in the grib file."""
    fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners)
    # The level-100 message carries a modelName; the level-300 one does not.
    with_model_name = DataQuery(name='t', level=100, modifiers=tuple())
    without_model_name = DataQuery(name='t', level=300, modifiers=tuple())
    dataset = self._get_test_datasets(
        [with_model_name, without_model_name], fake_pygrib)
    assert dataset[with_model_name].attrs['modelName'] == 'notknown'
    assert dataset[without_model_name].attrs['modelName'] == 'unknown'
def test_load_all(self, proj_params, lon_corners, lat_corners):
    """Test loading all test datasets."""
    fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners)
    # Request the 't' parameter at each of the three available levels.
    queries = [DataQuery(name='t', level=level, modifiers=tuple())
               for level in (100, 200, 300)]
    datasets = self._get_test_datasets(queries, fake_pygrib)
    assert len(datasets) == 3
    for data_arr in datasets.values():
        assert isinstance(data_arr, xr.DataArray)
        assert data_arr.attrs['units'] == 'K'
def test_jscanspositively(self, proj_params, lon_corners, lat_corners):
    """Check that data is flipped if the jScansPositively is present."""
    fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners)
    # The level-100 message has no jScansPositively; level 200 sets it.
    plain_query = DataQuery(name='t', level=100, modifiers=tuple())
    flipped_query = DataQuery(name='t', level=200, modifiers=tuple())
    dataset = self._get_test_datasets([flipped_query, plain_query], fake_pygrib)
    # Without the flag the data comes through unchanged ...
    np.testing.assert_allclose(fake_gribdata(), dataset[plain_query].values)
    # ... with the flag it is flipped along the first axis.
    np.testing.assert_allclose(fake_gribdata(),
                               dataset[flipped_query].values[::-1])
def test_get_key(self):
    """Test 'get_key' special functions."""
    from satpy.dataset import DataQuery
    d = self.test_dict
    # num_results=0 and num_results=3 return lists (each holding a single
    # match here); the default returns the single best match directly.
    res1 = get_key(make_dataid(name='test4'), d, calibration='radiance')
    res2 = get_key(make_dataid(name='test4'), d,
                   calibration='radiance', num_results=0)
    res3 = get_key(make_dataid(name='test4'), d,
                   calibration='radiance', num_results=3)
    self.assertEqual(len(res2), 1)
    self.assertEqual(len(res3), 1)
    res2 = res2[0]
    res3 = res3[0]
    self.assertEqual(res1, res2)
    self.assertEqual(res1, res3)
    # Query-based filtering: by polarization, resolution and level.
    res1 = get_key('test4', d, query=DataQuery(polarization='V'))
    self.assertEqual(res1, make_dataid(name='test4', calibration='radiance',
                                       polarization='V'))
    res1 = get_key(0.5, d, query=DataQuery(resolution=500))
    self.assertEqual(res1, make_dataid(name='testh',
                                       wavelength=(0, 0.5, 1),
                                       resolution=500))
    res1 = get_key('test6', d, query=DataQuery(level=100))
    self.assertEqual(res1, make_dataid(name='test6', level=100))
    # Modifier handling: the default lookup and an explicit ('mod2',)
    # query agree; a ('mod1', 'mod2') query selects a different id.
    res1 = get_key('test5', d)
    res2 = get_key('test5', d, query=DataQuery(modifiers=('mod2', )))
    res3 = get_key('test5', d, query=DataQuery(modifiers=('mod1', 'mod2', )))
    self.assertEqual(res1, make_dataid(name='test5', modifiers=('mod2', )))
    self.assertEqual(res1, res2)
    self.assertNotEqual(res1, res3)
    # more than 1 result when default is to ask for 1 result
    self.assertRaises(KeyError, get_key, 'test4', d, best=False)
def test_load_all(self, pg):
    """Test loading all test datasets."""
    pg.open.return_value = FakeGRIB()
    from satpy.readers import load_reader
    reader = load_reader(self.reader_configs)
    loadables = reader.select_files_from_pathnames(
        ['gfs.t18z.sfluxgrbf106.grib2'])
    reader.create_filehandlers(loadables)
    # Load the 't' parameter at each of the three available levels.
    queries = [DataQuery(name='t', level=level, modifiers=tuple())
               for level in (100, 200, 300)]
    datasets = reader.load(queries)
    self.assertEqual(len(datasets), 3)
    for data_arr in datasets.values():
        self.assertIsInstance(data_arr, xr.DataArray)
        self.assertEqual(data_arr.attrs['units'], 'K')
def test_get_coordinates_for_dataset_key(self):
    """Test getting coordinates for a key."""
    query = DataQuery(name='ch01', wavelength=(0.5, 0.6, 0.7, 'µm'),
                      calibration='reflectance', modifiers=())
    coords = self.reader._get_coordinates_for_dataset_key(query)
    expected = [make_dataid(name='lons'), make_dataid(name='lats')]
    self.assertListEqual(coords, expected)
def test_read_calibrated_dB(self, mocked_rioxarray_open):
    """Test the calibration routines."""
    calibration = mock.MagicMock()
    calibration.name = "sigma_nought"
    fake_band = xr.DataArray(da.from_array(np.array([[0, 1], [2, 3]])),
                             dims=['y', 'x'])
    mocked_rioxarray_open.return_value = fake_band
    query = DataQuery(name="measurement", polarization="vv",
                      calibration=calibration, quantity='dB')
    xarr = self.test_fh.get_dataset(query, info=dict())
    np.testing.assert_allclose(xarr, [[np.nan, 3.0103], [6.9897, 10]])
def test_area_def_crs(self, proj_params, lon_corners, lat_corners):
    """Check that the projection is accurate."""
    fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners)
    queries = [DataQuery(name='t', level=100, modifiers=tuple())]
    datasets = self._get_test_datasets(queries, fake_pygrib)
    area = datasets['t'].attrs['area']
    # Only AreaDefinitions built with pyproj >= 2.0 expose a `crs`.
    if not hasattr(area, 'crs'):
        pytest.skip("Can't test with pyproj < 2.0")
    _round_trip_projection_lonlat_check(area)
def _analyze_messages(self, grib_file):
    """Index each GRIB message in *grib_file* by a DataQuery id."""
    grib_file.seek(0)
    # GRIB message numbers are conventionally 1-based.
    for message_number, msg in enumerate(grib_file, start=1):
        msg_id = DataQuery(name=msg['shortName'],
                           level=msg['level'],
                           modifiers=tuple())
        self._msg_datasets[msg_id] = {
            'message': message_number,
            'name': msg['shortName'],
            'level': msg['level'],
            'file_type': self.filetype_info['file_type'],
        }
def test_is_modified(self):
    """Test that modifications are detected properly."""
    from satpy.dataset import DataQuery
    modified = DataQuery(name="a", wavelength=0.2, modifiers=('hej', ))
    unmodified = DataQuery(name="a", wavelength=0.2, modifiers=tuple())
    assert modified.is_modified()
    assert not unmodified.is_modified()
def test_create_less_modified_query(self):
    """Test that modifications are popped correctly."""
    from satpy.dataset import DataQuery
    modified = DataQuery(name="a", wavelength=0.2, modifiers=('hej', ))
    unmodified = DataQuery(name="a", wavelength=0.2, modifiers=tuple())
    # Popping the only modifier leaves an empty tuple; an already
    # unmodified query stays empty.
    assert not modified.create_less_modified_query()['modifiers']
    assert not unmodified.create_less_modified_query()['modifiers']
def test_read_calibrated_dB(self):
    """Test the calibration routines."""
    calibration = mock.MagicMock()
    calibration.name = "sigma_nought"
    with mock.patch('satpy.readers.sar_c_safe.xr.open_rasterio') as fake_read_band:
        fake_read_band.return_value = xr.DataArray(
            da.from_array(np.array([[0, 1], [2, 3]])))
        query = DataQuery(name="measurement", polarization="vv",
                          calibration=calibration, quantity='dB')
        xarr = self.test_fh.get_dataset(query, info=dict())
        np.testing.assert_allclose(xarr, [[np.nan, 3.0103], [6.9897, 10]])
def _get_coordinates_for_dataset_key(self, dsid):
    """Get the coordinate dataset keys for *dsid*."""
    ds_info = self.all_ids[dsid]
    coord_keys = []
    for cinfo in ds_info.get('coordinates', []):
        if not isinstance(cinfo, dict):
            cinfo = {'name': cinfo}
        # Propagate shared id keys (other than 'name') from the dataset
        # to the coordinate query when the dataset defines them.
        for key in self._co_keys:
            if key != 'name' and ds_info.get(key) is not None:
                cinfo[key] = ds_info[key]
        coord_keys.append(self.get_dataset_key(DataQuery.from_dict(cinfo)))
    return coord_keys
def test_get_calibration_constant(self):
    """Test getting the calibration constant."""
    query = DataQuery(name="calibration_constant", polarization="vv")
    constant = self.calibration_fh.get_dataset(query, {})
    assert constant == 1
def test_read_lon_lats(self):
    """Test reading lons and lats."""
    class FakeGCP:
        # Minimal stand-in for a rasterio ground control point:
        # (row, col) are pixel coordinates, (x, y, z) geographic ones.
        def __init__(self, *args):
            self.row, self.col, self.x, self.y, self.z = args

    # GCPs placed on an irregular grid over a 16x16 raster.
    gcps = [FakeGCP(0, 0, 0, 0, 0),
            FakeGCP(0, 3, 1, 0, 0),
            FakeGCP(3, 0, 0, 1, 0),
            FakeGCP(3, 3, 1, 1, 0),
            FakeGCP(0, 7, 2, 0, 0),
            FakeGCP(3, 7, 2, 1, 0),
            FakeGCP(7, 7, 2, 2, 0),
            FakeGCP(7, 3, 1, 2, 0),
            FakeGCP(7, 0, 0, 2, 0),
            FakeGCP(0, 15, 3, 0, 0),
            FakeGCP(3, 15, 3, 1, 0),
            FakeGCP(7, 15, 3, 2, 0),
            FakeGCP(15, 15, 3, 3, 0),
            FakeGCP(15, 7, 2, 3, 0),
            FakeGCP(15, 3, 1, 3, 0),
            FakeGCP(15, 0, 0, 3, 0),
            ]
    crs = dict(init='epsg:4326')
    self.mocked_rio_open.return_value.gcps = [gcps, crs]
    self.mocked_rio_open.return_value.shape = [16, 16]
    query = DataQuery(name="longitude", polarization="vv")
    xarr = self.test_fh.get_dataset(query, info=dict())
    # Dense 16x16 longitude field — presumably interpolated from the
    # sparse GCPs above (TODO confirm against the file handler).
    expected = np.array([
        [3.79492915e-16, 5.91666667e-01, 9.09722222e-01, 1.00000000e+00,
         9.08333333e-01, 6.80555556e-01, 3.62500000e-01, 8.32667268e-17,
         -3.61111111e-01, -6.75000000e-01, -8.95833333e-01, -9.77777778e-01,
         -8.75000000e-01, -5.41666667e-01, 6.80555556e-02, 1.00000000e+00],
        [1.19166667e+00, 1.32437500e+00, 1.36941964e+00, 1.34166667e+00,
         1.25598214e+00, 1.12723214e+00, 9.70282738e-01, 8.00000000e-01,
         6.31250000e-01, 4.78898810e-01, 3.57812500e-01, 2.82857143e-01,
         2.68898810e-01, 3.30803571e-01, 4.83437500e-01, 7.41666667e-01],
        [1.82638889e+00, 1.77596726e+00, 1.72667765e+00, 1.67757937e+00,
         1.62773172e+00, 1.57619402e+00, 1.52202558e+00, 1.46428571e+00,
         1.40203373e+00, 1.33432894e+00, 1.26023065e+00, 1.17879819e+00,
         1.08909084e+00, 9.90167942e-01, 8.81088790e-01, 7.60912698e-01],
        [2.00000000e+00, 1.99166667e+00, 1.99305556e+00, 2.00000000e+00,
         2.00833333e+00, 2.01388889e+00, 2.01250000e+00, 2.00000000e+00,
         1.97222222e+00, 1.92500000e+00, 1.85416667e+00, 1.75555556e+00,
         1.62500000e+00, 1.45833333e+00, 1.25138889e+00, 1.00000000e+00],
        [1.80833333e+00, 2.01669643e+00, 2.18011267e+00, 2.30119048e+00,
         2.38253827e+00, 2.42676446e+00, 2.43647747e+00, 2.41428571e+00,
         2.36279762e+00, 2.28462160e+00, 2.18236607e+00, 2.05863946e+00,
         1.91605017e+00, 1.75720663e+00, 1.58471726e+00, 1.40119048e+00],
        [1.34722222e+00, 1.89627976e+00, 2.29940830e+00, 2.57341270e+00,
         2.73509779e+00, 2.80126842e+00, 2.78872945e+00, 2.71428571e+00,
         2.59474206e+00, 2.44690334e+00, 2.28757440e+00, 2.13356009e+00,
         2.00166525e+00, 1.90869473e+00, 1.87145337e+00, 1.90674603e+00],
        [7.12500000e-01, 1.67563988e+00, 2.36250177e+00, 2.80892857e+00,
         3.05076318e+00, 3.12384850e+00, 3.06402742e+00, 2.90714286e+00,
         2.68903770e+00, 2.44555485e+00, 2.21253720e+00, 2.02582766e+00,
         1.92126913e+00, 1.93470451e+00, 2.10197669e+00, 2.45892857e+00],
        [5.55111512e-16, 1.40000000e+00, 2.38095238e+00, 3.00000000e+00,
         3.31428571e+00, 3.38095238e+00, 3.25714286e+00, 3.00000000e+00,
         2.66666667e+00, 2.31428571e+00, 2.00000000e+00, 1.78095238e+00,
         1.71428571e+00, 1.85714286e+00, 2.26666667e+00, 3.00000000e+00],
        [-6.94444444e-01, 1.11458333e+00, 2.36631944e+00, 3.13888889e+00,
         3.51041667e+00, 3.55902778e+00, 3.36284722e+00, 3.00000000e+00,
         2.54861111e+00, 2.08680556e+00, 1.69270833e+00, 1.44444444e+00,
         1.42013889e+00, 1.69791667e+00, 2.35590278e+00, 3.47222222e+00],
        [-1.27500000e+00, 8.64613095e-01, 2.33016227e+00, 3.21785714e+00,
         3.62390731e+00, 3.64452239e+00, 3.37591199e+00, 2.91428571e+00,
         2.35585317e+00, 1.79682398e+00, 1.33340774e+00, 1.06181406e+00,
         1.07825255e+00, 1.47893282e+00, 2.36006448e+00, 3.81785714e+00],
        [-1.64583333e+00, 6.95312500e-01, 2.28404018e+00, 3.22916667e+00,
         3.63950893e+00, 3.62388393e+00, 3.29110863e+00, 2.75000000e+00,
         2.10937500e+00, 1.47805060e+00, 9.64843750e-01, 6.78571429e-01,
         7.28050595e-01, 1.22209821e+00, 2.26953125e+00, 3.97916667e+00],
        [-1.71111111e+00, 6.51904762e-01, 2.23951247e+00, 3.16507937e+00,
         3.54197279e+00, 3.48356009e+00, 3.10320862e+00, 2.51428571e+00,
         1.83015873e+00, 1.16419501e+00, 6.29761905e-01, 3.40226757e-01,
         4.08956916e-01, 9.49319728e-01, 2.07468254e+00, 3.89841270e+00],
        [-1.37500000e+00, 7.79613095e-01, 2.20813846e+00, 3.01785714e+00,
         3.31605017e+00, 3.20999858e+00, 2.80698342e+00, 2.21428571e+00,
         1.53918651e+00, 8.88966837e-01, 3.70907738e-01, 9.22902494e-02,
         1.60395408e-01, 6.82504252e-01, 1.76589782e+00, 3.51785714e+00],
        [-5.41666667e-01, 1.12366071e+00, 2.20147747e+00, 2.77976190e+00,
         2.94649235e+00, 2.78964711e+00, 2.39720451e+00, 1.85714286e+00,
         1.25744048e+00, 6.86075680e-01, 2.31026786e-01, -1.97278912e-02,
         2.17899660e-02, 4.43558673e-01, 1.33355655e+00, 2.77976190e+00],
        [8.84722222e-01, 1.72927083e+00, 2.23108879e+00, 2.44305556e+00,
         2.41805060e+00, 2.20895337e+00, 1.86864335e+00, 1.45000000e+00,
         1.00590278e+00, 5.89231151e-01, 2.52864583e-01, 4.96825397e-02,
         3.25644841e-02, 2.54389881e-01, 7.68038194e-01, 1.62638889e+00],
        [3.00000000e+00, 2.64166667e+00, 2.30853175e+00, 2.00000000e+00,
         1.71547619e+00, 1.45436508e+00, 1.21607143e+00, 1.00000000e+00,
         8.05555556e-01, 6.32142857e-01, 4.79166667e-01, 3.46031746e-01,
         2.32142857e-01, 1.36904762e-01, 5.97222222e-02, 0.00000000e+00]
    ])
    np.testing.assert_allclose(xarr.values, expected)
def test_get_noise_dataset_has_right_chunk_size(self):
    """Test using get_dataset for the noise has right chunk size in result."""
    query = DataQuery(name="noise", polarization="vv")
    noise = self.noise_fh.get_dataset(query, {}, chunks=3)
    assert noise.data.chunksize == (3, 3)
def make_dsq(**items):
    """Make a dataset query from the given id-key/value pairs."""
    query = DataQuery(**items)
    return query
def test_get_calibration_dataset_has_right_chunk_size(self):
    """Test using get_dataset for the calibration yields array with right chunksize."""
    query = DataQuery(name="gamma", polarization="vv")
    gamma = self.calibration_fh.get_dataset(query, {}, chunks=3)
    assert gamma.data.chunksize == (3, 3)
    np.testing.assert_allclose(gamma, self.expected_gamma)
def test_id_query_interactions():
    """Test interactions between DataIDs and DataQuery's."""
    from satpy.dataset.dataid import DataQuery, DataID, WavelengthRange, ModifierTuple, minimal_default_keys_config
    # Id-keys configuration covering name, wavelength, resolution,
    # calibration and modifiers.
    default_id_keys_config = {
        'name': {
            'required': True,
        },
        'wavelength': {
            'type': WavelengthRange,
        },
        'resolution': None,
        'calibration': {
            'enum': ['reflectance', 'brightness_temperature', 'radiance', 'counts']
        },
        'modifiers': {
            'default': ModifierTuple(),
            'type': ModifierTuple,
        },
    }
    # Check hash equality
    dq = DataQuery(modifiers=tuple(), name='cheese_shops')
    did = DataID(default_id_keys_config, name='cheese_shops')
    assert hash(dq) == hash(did)

    # Check did filtering
    did2 = DataID(default_id_keys_config, name='ni')
    res = dq.filter_dataids([did2, did])
    assert len(res) == 1
    assert res[0] == did

    # A wavelength query outside the id's range filters it out.
    dataid_container = [DataID(default_id_keys_config,
                               name='ds1', resolution=250,
                               calibration='reflectance',
                               modifiers=tuple())]
    dq = DataQuery(wavelength=0.22, modifiers=tuple())
    assert len(dq.filter_dataids(dataid_container)) == 0
    # A resolution key absent from the id's minimal config does not
    # prevent a match by name.
    dataid_container = [DataID(minimal_default_keys_config,
                               name='natural_color')]
    dq = DataQuery(name='natural_color', resolution=250)
    assert len(dq.filter_dataids(dataid_container)) == 1

    dq = make_dsq(wavelength=0.22, modifiers=('mod1', ))
    did = make_cid(name='static_image')
    assert len(dq.filter_dataids([did])) == 0

    # Check did sorting
    dq = DataQuery(name='cheese_shops', wavelength=2, modifiers='*')
    did = DataID(default_id_keys_config, name='cheese_shops',
                 wavelength=(1, 2, 3))
    did2 = DataID(default_id_keys_config, name='cheese_shops',
                  wavelength=(1.1, 2.1, 3.1))
    dsids, distances = dq.sort_dataids([did2, did])
    # Exact central wavelength (distance 0) sorts before the 0.1 offset.
    assert list(dsids) == [did, did2]
    assert np.allclose(distances, [0, 0.1])

    dq = DataQuery(name='cheese_shops')
    did = DataID(default_id_keys_config, name='cheese_shops', resolution=200)
    did2 = DataID(default_id_keys_config, name='cheese_shops', resolution=400)
    dsids, distances = dq.sort_dataids([did2, did])
    assert list(dsids) == [did, did2]
    assert distances[0] < distances[1]

    did = DataID(default_id_keys_config, name='cheese_shops',
                 calibration='counts')
    did2 = DataID(default_id_keys_config, name='cheese_shops',
                  calibration='reflectance')
    dsids, distances = dq.sort_dataids([did2, did])
    # 'reflectance' sorts before 'counts' here.
    assert list(dsids) == [did2, did]
    assert distances[0] < distances[1]

    did = DataID(default_id_keys_config, name='cheese_shops',
                 modifiers=tuple())
    did2 = DataID(default_id_keys_config, name='cheese_shops',
                  modifiers=tuple(['out_of_stock']))
    dsids, distances = dq.sort_dataids([did2, did])
    # The unmodified id sorts before the modified one.
    assert list(dsids) == [did, did2]
    assert distances[0] < distances[1]

    # Check (in)equality
    assert DataQuery(wavelength=10) != DataID(default_id_keys_config,
                                              name="VIS006")
def test_incidence_angle(self):
    """Test reading the incidence angle."""
    query = DataQuery(name="incidence_angle", polarization="vv")
    angle = self.annotation_fh.get_dataset(query, {})
    np.testing.assert_allclose(angle, 19.18318046)
def test_get_calibration_dataset(self):
    """Test using get_dataset for the calibration."""
    query = DataQuery(name="gamma", polarization="vv")
    gamma = self.calibration_fh.get_dataset(query, {})
    np.testing.assert_allclose(gamma, self.expected_gamma)
def test_get_noise_dataset(self):
    """Test using get_dataset for the noise."""
    query = DataQuery(name="noise", polarization="vv")
    noise = self.noise_fh.get_dataset(query, {})
    expected = self.expected_azimuth_noise * self.expected_range_noise
    np.testing.assert_allclose(noise, expected)
def _create_data_query(product, res):
    """Build a DataQuery for *product* at resolution *res* with DEFAULT modifiers."""
    query = DataQuery(name=product, resolution=res, modifiers=DEFAULT)
    return query