def _get_writers_and_frames(self, filename, datasets, fill_value, ignore_missing,
                            enh_args, imio_args):
    """Get writers and frames.

    Helper function for save_animation.
    """
    scene_gen = self._scene_gen
    first_scene = self.first_scene
    scenes = iter(self._scene_gen)
    info_scenes = [first_scene]
    if 'end_time' in filename:
        # if we need the last scene to generate the filename
        # then compute all the scenes so we can figure it out
        log.debug("Generating scenes to compute end_time for filename")
        scenes = list(scenes)
        info_scenes.append(scenes[-1])

    available_ds = [first_scene.get(ds) for ds in first_scene.wishlist]
    available_ds = [DataID.from_dataarray(ds) for ds in available_ds if ds is not None]
    dataset_ids = datasets or available_ds

    if not dataset_ids:
        raise RuntimeError("No datasets found for saving (resampling may be needed "
                           "to generate composites)")

    writers = {}
    frames = {}
    for dataset_id in dataset_ids:
        if not self.is_generator and not self._all_same_area([dataset_id]):
            raise ValueError("Sub-scene datasets must all be on the same "
                             "area (see the 'resample' method).")

        all_datasets = scene_gen[dataset_id]
        info_datasets = [scn.get(dataset_id) for scn in info_scenes]
        this_fn, shape, this_fill = self._get_animation_info(info_datasets, filename,
                                                             fill_value=fill_value)

        data_to_write = self._get_animation_frames(all_datasets, shape, this_fill,
                                                   ignore_missing, enh_args)

        writer = imageio.get_writer(this_fn, **imio_args)
        frames[dataset_id] = data_to_write
        writers[dataset_id] = writer
    return (writers, frames)
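# Hedged sketch (not part of the original source): one plausible way the
# (writers, frames) pair returned above could be consumed when writing the
# animation synchronously. It assumes the standard imageio writer API
# (append_data/close); the helper name `write_frames` is illustrative only.
import numpy as np


def write_frames(writers, frames):
    """Append every frame of each dataset to its writer, then close it."""
    for dataset_id, writer in writers.items():
        for frame in frames[dataset_id]:
            # Frames may be dask-backed; np.asarray forces computation
            # before handing the image to imageio.
            writer.append_data(np.asarray(frame))
        writer.close()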
def test_basic_init(self):
    """Test basic ways of creating a DataID."""
    from satpy.dataset import (DataID,
                               default_id_keys_config as dikc,
                               minimal_default_keys_config as mdkc)

    did = DataID(dikc, name="a")
    assert did['name'] == 'a'
    assert did['modifiers'] == tuple()
    DataID(dikc, name="a", wavelength=0.86)
    DataID(dikc, name="a", resolution=1000)
    DataID(dikc, name="a", calibration='radiance')
    DataID(dikc, name="a", wavelength=0.86, resolution=250,
           calibration='radiance')
    DataID(dikc, name="a", wavelength=0.86, resolution=250,
           calibration='radiance', modifiers=('sunz_corrected',))
    with pytest.raises(ValueError):
        DataID(dikc, wavelength=0.86)
    did = DataID(mdkc, name='comp24', resolution=500)
    assert did['resolution'] == 500
def get_dataset(self, data_id: DataID, ds_info: dict):
    """Get fake DataArray for testing."""
    if data_id['name'] == 'ds9_fail_load':
        raise KeyError("Can't load '{}' because it is supposed to "
                       "fail.".format(data_id['name']))
    attrs = data_id.to_dict()
    attrs.update(ds_info)
    attrs['sensor'] = self.filetype_info.get('sensor', 'fake_sensor')
    attrs['platform_name'] = 'fake_platform'
    attrs['start_time'] = self.start_time
    attrs['end_time'] = self.end_time
    res = attrs.get('resolution', 250)
    # Size of the fake array depends on the requested resolution.
    rows = cols = {
        250: 20,
        500: 10,
        1000: 5,
    }.get(res, 5)
    return DataArray(data=da.zeros((rows, cols)),
                     attrs=attrs,
                     dims=['y', 'x'])
def make_dataid(**items):
    """Make a data id."""
    return DataID(local_id_keys_config, **items)
def make_cid(**items):
    """Make a DataID with a minimal set of keys to id composites."""
    return DataID(minimal_default_keys_config, **items)
def make_dataid(**items):
    """Make a DataID with default keys."""
    return DataID(default_id_keys_config, **items)
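# Hedged usage sketch (not in the original source): how the make_dataid
# helper above is typically called in tests. The key/value pairs are
# illustrative, assuming the default id keys accept name, wavelength,
# resolution, calibration and modifiers as shown elsewhere in this section.
def test_make_dataid_usage():
    """Show make_dataid building a DataID from keyword arguments."""
    did = make_dataid(name='C05', wavelength=1.6, resolution=2000,
                      calibration='reflectance', modifiers=())
    assert did['name'] == 'C05'
    assert did['resolution'] == 2000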
def test_bad_calibration(self):
    """Test that asking for a bad calibration fails."""
    from satpy.dataset import DataID, default_id_keys_config as dikc
    with pytest.raises(ValueError):
        DataID(dikc, name='C05', calibration='_bad_')
def test_id_query_interactions():
    """Test interactions between DataIDs and DataQuery objects."""
    from satpy.dataset import (DataQuery, DataID, WavelengthRange,
                               ModifierTuple, minimal_default_keys_config)

    default_id_keys_config = {'name': {
                                  'required': True,
                              },
                              'wavelength': {
                                  'type': WavelengthRange,
                              },
                              'resolution': None,
                              'calibration': {
                                  'enum': [
                                      'reflectance',
                                      'brightness_temperature',
                                      'radiance',
                                      'counts'
                                  ]
                              },
                              'modifiers': {
                                  'default': ModifierTuple(),
                                  'type': ModifierTuple,
                              },
                              }

    # Check hash equality
    dq = DataQuery(modifiers=tuple(), name='cheese_shops')
    did = DataID(default_id_keys_config, name='cheese_shops')
    assert hash(dq) == hash(did)

    # Check did filtering
    did2 = DataID(default_id_keys_config, name='ni')
    res = dq.filter_dataids([did2, did])
    assert len(res) == 1
    assert res[0] == did

    dataid_container = [DataID(default_id_keys_config,
                               name='ds1',
                               resolution=250,
                               calibration='reflectance',
                               modifiers=tuple())]
    dq = DataQuery(wavelength=0.22, modifiers=tuple())
    assert len(dq.filter_dataids(dataid_container)) == 0
    dataid_container = [DataID(minimal_default_keys_config,
                               name='natural_color')]
    dq = DataQuery(name='natural_color', resolution=250)
    assert len(dq.filter_dataids(dataid_container)) == 1

    dq = make_dsq(wavelength=0.22, modifiers=('mod1',))
    did = make_cid(name='static_image')
    assert len(dq.filter_dataids([did])) == 0

    # Check did sorting
    dq = DataQuery(name='cheese_shops', wavelength=2, modifiers='*')
    did = DataID(default_id_keys_config, name='cheese_shops', wavelength=(1, 2, 3))
    did2 = DataID(default_id_keys_config, name='cheese_shops', wavelength=(1.1, 2.1, 3.1))
    dsids, distances = dq.sort_dataids([did2, did])
    assert list(dsids) == [did, did2]
    assert np.allclose(distances, [0, 0.1])

    dq = DataQuery(name='cheese_shops')
    did = DataID(default_id_keys_config, name='cheese_shops', resolution=200)
    did2 = DataID(default_id_keys_config, name='cheese_shops', resolution=400)
    dsids, distances = dq.sort_dataids([did2, did])
    assert list(dsids) == [did, did2]
    assert distances[0] < distances[1]

    did = DataID(default_id_keys_config, name='cheese_shops', calibration='counts')
    did2 = DataID(default_id_keys_config, name='cheese_shops', calibration='reflectance')
    dsids, distances = dq.sort_dataids([did2, did])
    assert list(dsids) == [did2, did]
    assert distances[0] < distances[1]

    did = DataID(default_id_keys_config, name='cheese_shops', modifiers=tuple())
    did2 = DataID(default_id_keys_config, name='cheese_shops', modifiers=tuple(['out_of_stock']))
    dsids, distances = dq.sort_dataids([did2, did])
    assert list(dsids) == [did, did2]
    assert distances[0] < distances[1]

    # Check (in)equality
    assert DataQuery(wavelength=10) != DataID(default_id_keys_config, name="VIS006")