Example #1
def save_dataset(scns, fmat, fmat_config, renames):
    """Save one dataset to file, not doing the actual computation."""
    obj = None
    try:
        with prepared_filename(fmat, renames) as filename:
            res = fmat.get('resolution', None)
            kwargs = fmat_config.copy()
            kwargs.pop('fname_pattern', None)
            kwargs.pop('dispatch', None)
            if isinstance(fmat['product'], (tuple, list, set)):
                kwargs.pop('format')
                dsids = []
                for prod in fmat['product']:
                    dsids.append(
                        DatasetID(name=prod, resolution=res, modifiers=None))
                obj = scns[fmat['area']].save_datasets(datasets=dsids,
                                                       filename=filename,
                                                       compute=False,
                                                       **kwargs)
            else:
                dsid = DatasetID(name=fmat['product'],
                                 resolution=res,
                                 modifiers=None)
                obj = scns[fmat['area']].save_dataset(dsid,
                                                      filename=filename,
                                                      compute=False,
                                                      **kwargs)
    except KeyError as err:
        LOG.info('Skipping %s: %s', fmat['product'], str(err))
    else:
        fmat_config['filename'] = renames.get(filename, filename)
    return obj
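
A rough sketch of how this function might be driven, assuming trollflow2-style job dictionaries implied by the accesses above; every name and key below is illustrative, not a verified API:

# Hypothetical inputs mirroring the keys save_dataset reads above.
fmat = {'product': 'overview',   # or a tuple/list of products
        'area': 'euron1',        # key into the scns mapping
        'resolution': 1000}
fmat_config = {'writer': 'geotiff',
               'fname_pattern': '{product}_{area}.tif'}  # popped before saving
renames = {}  # temporary-name -> final-name mapping, may be empty

# scns is assumed to map area names to satpy Scene objects; the return
# value is a delayed save object because compute=False is passed through.
obj = save_dataset(scns, fmat, fmat_config, renames)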
Example #2
    def test_get_dataset_latlon(self, *mocks):
        """Test getting the latitudes and longitudes."""
        from satpy.dataset import DatasetID

        lons = np.ones((3, 3))
        lats = 2 * lons
        reader = self._get_reader_mocked()
        reader.get_lonlat.return_value = lons, lats
        fh = self._get_fh_mocked(
            reader=reader,
            start_line=None,
            end_line=None,
            strip_invalid_coords=False,
            interpolate_coords=True
        )

        # With interpolation of coordinates
        for name, exp_data in zip(['longitude', 'latitude'], [lons, lats]):
            key = DatasetID(name)
            info = {'name': name, 'standard_name': 'my_standard_name'}
            res = fh.get_dataset(key=key, info=info)
            exp = xr.DataArray(exp_data,
                               name=res.name,
                               dims=('y', 'x'),
                               coords={'acq_time': ('y', [0, 1, 2])})
            xr.testing.assert_equal(res, exp)

        # Without interpolation of coordinates
        fh.interpolate_coords = False
        for name, exp_data in zip(['longitude', 'latitude'], [lons, lats]):
            key = DatasetID(name)
            info = {'name': name, 'standard_name': 'my_standard_name'}
            res = fh.get_dataset(key=key, info=info)
            self.assertTupleEqual(res.dims, ('y', 'x_every_eighth'))
Example #3
    def test_get_dataset_angles(self, get_angle, *mocks):
        """Test getting the angles."""
        from satpy.dataset import DatasetID
        from satpy.readers.avhrr_l1b_gaclac import ANGLES

        ones = np.ones((3, 3))
        get_angle.return_value = ones
        reader = self._get_reader_mocked()
        fh = self._get_fh_mocked(
            reader=reader,
            start_line=None,
            end_line=None,
            strip_invalid_coords=False,
            interpolate_coords=True
        )

        # With interpolation of coordinates
        for angle in ANGLES:
            key = DatasetID(angle)
            info = {'name': angle, 'standard_name': 'my_standard_name'}
            res = fh.get_dataset(key=key, info=info)
            exp = xr.DataArray(ones,
                               name=res.name,
                               dims=('y', 'x'),
                               coords={'acq_time': ('y', [0, 1, 2])})
            xr.testing.assert_equal(res, exp)

        # Without interpolation of coordinates
        fh.interpolate_coords = False
        for angle in ANGLES:
            key = DatasetID(angle)
            info = {'name': angle, 'standard_name': 'my_standard_name'}
            res = fh.get_dataset(key=key, info=info)
            self.assertTupleEqual(res.dims, ('y', 'x_every_eighth'))
Example #4
    def test_get_coordinates_for_dataset_keys(self):
        """Test getting coordinates for keys."""
        ds_id1 = DatasetID(name='ch01',
                           wavelength=(0.5, 0.6, 0.7),
                           resolution=None,
                           polarization=None,
                           calibration='reflectance',
                           modifiers=())
        ds_id2 = DatasetID(name='ch02',
                           wavelength=(0.7, 0.75, 0.8),
                           resolution=None,
                           polarization=None,
                           calibration='counts',
                           modifiers=())
        lons = DatasetID(name='lons',
                         wavelength=None,
                         resolution=None,
                         polarization=None,
                         calibration=None,
                         modifiers=())
        lats = DatasetID(name='lats',
                         wavelength=None,
                         resolution=None,
                         polarization=None,
                         calibration=None,
                         modifiers=())

        res = self.reader._get_coordinates_for_dataset_keys(
            [ds_id1, ds_id2, lons])
        expected = {ds_id1: [lons, lats], ds_id2: [lons, lats], lons: []}

        self.assertDictEqual(res, expected)
Example #5
File: test_readers.py Project: lamby/satpy
 def setUp(self):
     """Create a test DatasetDict."""
     from satpy.dataset import DatasetID
     from satpy.readers import DatasetDict
     self.regular_dict = regular_dict = {
         DatasetID(name="test", wavelength=(0, 0.5, 1), resolution=1000):
         "1",
         DatasetID(name="testh", wavelength=(0, 0.5, 1), resolution=500):
         "1h",
         DatasetID(name="test2", wavelength=(1, 1.5, 2), resolution=1000):
         "2",
         DatasetID(name="test3",
                   wavelength=(1.2, 1.7, 2.2),
                   resolution=1000): "3",
         DatasetID(name="test4", calibration="radiance", polarization="V"):
         "4rad",
         DatasetID(name="test4",
                   calibration="reflectance",
                   polarization="H"): "4refl",
         DatasetID(name="test5", modifiers=('mod1', 'mod2')): "5_2mod",
         DatasetID(name="test5", modifiers=('mod2', )): "5_1mod",
         DatasetID(name='test6', level=100): '6_100',
         DatasetID(name='test6', level=200): '6_200',
     }
     self.test_dict = DatasetDict(regular_dict)
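
Given a dict like this, a DatasetDict resolves partial queries; a small hedged sketch (the exact ambiguity handling can vary between satpy versions):

d = DatasetDict(regular_dict)
assert d['test2'] == '2'  # name-only lookup resolves against the full DatasetID keys
key = d.get_key(DatasetID(name='test4', calibration='radiance'))
assert d[key] == '4rad'   # fully qualified key retrieves the exact entry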
Example #6
 def test_all_dataset_ids(self):
     """Check that all datasets ids are returned."""
     self.assertSetEqual(set(self.reader.all_dataset_ids),
                         {DatasetID(name='ch02',
                                    wavelength=(0.7, 0.75, 0.8),
                                    resolution=None,
                                    polarization=None,
                                    calibration='counts',
                                    modifiers=()),
                          DatasetID(name='ch01',
                                    wavelength=(0.5, 0.6, 0.7),
                                    resolution=None,
                                    polarization=None,
                                    calibration='reflectance',
                                    modifiers=()),
                          DatasetID(name='lons',
                                    wavelength=None,
                                    resolution=None,
                                    polarization=None,
                                    calibration=None,
                                    modifiers=()),
                          DatasetID(name='lats',
                                    wavelength=None,
                                    resolution=None,
                                    polarization=None,
                                    calibration=None,
                                    modifiers=())})
Example #7
    def _process_composite_config(self, composite_name, conf, composite_type,
                                  sensor_id, composite_config, **kwargs):

        compositors = self.compositors[sensor_id]
        modifiers = self.modifiers[sensor_id]

        try:
            options = conf[composite_type][composite_name]
            loader = options.pop('compositor')
        except KeyError:
            if composite_name in compositors or composite_name in modifiers:
                return conf
            raise ValueError("'compositor' missing or empty in %s" %
                             composite_config)

        options['name'] = composite_name
        for prereq_type in ['prerequisites', 'optional_prerequisites']:
            prereqs = []
            for item in options.get(prereq_type, []):
                if isinstance(item, dict):
                    key = DatasetID.from_dict(item)
                    prereqs.append(key)
                else:
                    prereqs.append(item)
            options[prereq_type] = prereqs

        if composite_type == 'composites':
            options.update(**kwargs)
            key = DatasetID.from_dict(options)
            comp = loader(**options)
            compositors[key] = comp
        elif composite_type == 'modifiers':
            modifiers[composite_name] = loader, options
Example #8
File: scene.py Project: zhangqrl/satpy
    def _slice_datasets(self, dataset_ids, slice_key, new_area, area_only=True):
        """Slice scene in-place for the datasets specified."""
        new_datasets = {}
        datasets = (self[ds_id] for ds_id in dataset_ids)
        for ds, parent_ds in dataset_walker(datasets):
            ds_id = DatasetID.from_dict(ds.attrs)
            # handle ancillary variables
            pres = None
            if parent_ds is not None:
                pres = new_datasets[DatasetID.from_dict(parent_ds.attrs)]
            if ds_id in new_datasets:
                replace_anc(ds, pres)
                continue
            if area_only and ds.attrs.get('area') is None:
                new_datasets[ds_id] = ds
                replace_anc(ds, pres)
                continue

            if not isinstance(slice_key, dict):
                # match dimension name to slice object
                key = dict(zip(ds.dims, slice_key))
            else:
                key = slice_key
            new_ds = ds.isel(**key)
            if new_area is not None:
                new_ds.attrs['area'] = new_area

            new_datasets[ds_id] = new_ds
            if parent_ds is None:
                # don't use `__setitem__` because we don't want this to
                # affect the existing wishlist/dep tree
                self.datasets[ds_id] = new_ds
            else:
                replace_anc(new_ds, pres)
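
A hedged sketch of how this slicing is usually reached from user code; Scene.crop is the public entry point and the bounding box below is illustrative:

# Cropping a Scene to a lon/lat box slices every dataset via _slice_datasets.
scn2 = scn.crop(ll_bbox=(-10.0, 40.0, 10.0, 60.0))  # scn: an existing Scene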
Example #9
File: hdfeos_l1b.py Project: cuulee/satpy
    def get_dataset(self, key, info, out=None, xslice=None, yslice=None):
        """Get the dataset designated by *key*."""
        if key.name in [
                'solar_zenith_angle', 'solar_azimuth_angle',
                'satellite_zenith_angle', 'satellite_azimuth_angle'
        ]:

            if key.name == 'solar_zenith_angle':
                var = self.sd.select('SolarZenith')
            elif key.name == 'solar_azimuth_angle':
                var = self.sd.select('SolarAzimuth')
            elif key.name == 'satellite_zenith_angle':
                var = self.sd.select('SensorZenith')
            else:  # 'satellite_azimuth_angle', guaranteed by the check above
                var = self.sd.select('SensorAzimuth')

            data = xr.DataArray(from_sds(var, chunks=CHUNK_SIZE),
                                dims=['y', 'x']).astype(np.float32)
            data = data.where(data != var._FillValue)
            data = data * np.float32(var.scale_factor)

            data.attrs = info
            return data

        if key.name not in ['longitude', 'latitude']:
            return

        if (self.cache[key.resolution]['lons'] is None
                or self.cache[key.resolution]['lats'] is None):

            lons_id = DatasetID('longitude', resolution=key.resolution)
            lats_id = DatasetID('latitude', resolution=key.resolution)

            lons, lats = self.load([lons_id, lats_id],
                                   interpolate=False,
                                   raw=True)
            if key.resolution != self.resolution:
                from geotiepoints.geointerpolator import GeoInterpolator
                lons, lats = self._interpolate([lons, lats], self.resolution,
                                               lons_id.resolution,
                                               GeoInterpolator)
                lons = np.ma.masked_invalid(np.ascontiguousarray(lons))
                lats = np.ma.masked_invalid(np.ascontiguousarray(lats))
            self.cache[key.resolution]['lons'] = lons
            self.cache[key.resolution]['lats'] = lats

        if key.name == 'latitude':
            data = self.cache[key.resolution]['lats'].filled(np.nan)
            data = xr.DataArray(da.from_array(data,
                                              chunks=(CHUNK_SIZE, CHUNK_SIZE)),
                                dims=['y', 'x'])
        else:
            data = self.cache[key.resolution]['lons'].filled(np.nan)
            data = xr.DataArray(da.from_array(data,
                                              chunks=(CHUNK_SIZE, CHUNK_SIZE)),
                                dims=['y', 'x'])
        data.attrs = info
        return data
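
In user code this file handler is reached through a Scene; a hedged sketch with a hypothetical file list ('modis_l1b' is the satpy reader that wraps this hdfeos_l1b module):

from satpy import Scene
scn = Scene(reader='modis_l1b', filenames=modis_files)  # modis_files: hypothetical list
scn.load(['solar_zenith_angle', 'longitude', 'latitude'])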
Example #10
    def test_compare_no_wl(self):
        """Compare fully qualified wavelength ID to no wavelength ID"""
        from satpy.dataset import DatasetID
        d1 = DatasetID(name="a", wavelength=(0.1, 0.2, 0.3))
        d2 = DatasetID(name="a", wavelength=None)

        # this happens when sorting IDs during dependency checks
        self.assertFalse(d1 < d2)
        self.assertTrue(d2 < d1)
Example #11
File: scene.py Project: goodsonr/satpy
    def _resampled_scene(self, new_scn, destination_area, **resample_kwargs):
        """Resample `datasets` to the `destination` area."""
        new_datasets = {}
        datasets = list(new_scn.datasets.values())
        max_area = None
        if isinstance(destination_area, (str, six.text_type)):
            destination_area = get_area_def(destination_area)
        if hasattr(destination_area, 'freeze'):
            try:
                max_area = new_scn.max_area()
                destination_area = destination_area.freeze(max_area)
            except ValueError:
                raise ValueError("No dataset areas available to freeze "
                                 "DynamicAreaDefinition.")

        resamplers = {}
        for dataset, parent_dataset in dataset_walker(datasets):
            ds_id = DatasetID.from_dict(dataset.attrs)
            pres = None
            if parent_dataset is not None:
                pres = new_datasets[DatasetID.from_dict(parent_dataset.attrs)]
            if ds_id in new_datasets:
                replace_anc(dataset, pres)
                continue
            if dataset.attrs.get('area') is None:
                if parent_dataset is None:
                    new_scn.datasets[ds_id] = dataset
                else:
                    replace_anc(dataset, pres)
                continue
            LOG.debug("Resampling %s", ds_id)
            source_area = dataset.attrs['area']
            try:
                slice_x, slice_y = source_area.get_area_slices(
                    destination_area)
                source_area = source_area[slice_y, slice_x]
                dataset = dataset.isel(x=slice_x, y=slice_y)
                assert ('x', source_area.x_size) in dataset.sizes.items()
                assert ('y', source_area.y_size) in dataset.sizes.items()
                dataset.attrs['area'] = source_area
            except NotImplementedError:
                LOG.info("Not reducing data before resampling.")
            if source_area not in resamplers:
                key, resampler = prepare_resampler(source_area,
                                                   destination_area,
                                                   **resample_kwargs)
                resamplers[source_area] = resampler
                self.resamplers[key] = resampler
            kwargs = resample_kwargs.copy()
            kwargs['resampler'] = resamplers[source_area]
            res = resample_dataset(dataset, destination_area, **kwargs)
            new_datasets[ds_id] = res
            if parent_dataset is None:
                new_scn.datasets[ds_id] = res
            else:
                replace_anc(res, pres)
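
This private helper backs the public Scene.resample; a hedged usage sketch ('euro4' is a stock pytroll area definition):

# A DynamicAreaDefinition would be frozen against the scene's max_area(),
# exactly as handled at the top of _resampled_scene above.
local_scn = scn.resample('euro4', resampler='nearest')  # scn: an existing Scene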
Example #12
    def test_get_file_handlers(self):
        """Test getting filehandler to load a dataset."""
        ds_id1 = DatasetID(name='ch01', wavelength=(0.5, 0.6, 0.7),
                           resolution=None, polarization=None,
                           calibration='reflectance', modifiers=())
        self.reader.file_handlers = {'ftype1': 'bla'}

        self.assertEqual(self.reader._get_file_handlers(ds_id1), 'bla')

        lons = DatasetID(name='lons', wavelength=None,
                         resolution=None, polarization=None,
                         calibration=None, modifiers=())
        self.assertEqual(self.reader._get_file_handlers(lons), None)
Example #13
    def get_keys(self,
                 name_or_wl,
                 resolution=None,
                 polarization=None,
                 calibration=None,
                 modifiers=None):
        """Get keys matching `name_or_wl`, narrowed by any other attributes given."""
        # Get things that match at least the name_or_wl
        if isinstance(name_or_wl, numbers.Number):
            keys = [
                k for k in self.keys()
                if DatasetID.wavelength_match(k.wavelength, name_or_wl)
            ]
        elif isinstance(name_or_wl, (str, six.text_type)):
            keys = [
                k for k in self.keys()
                if DatasetID.name_match(k.name, name_or_wl)
            ]
        else:
            raise TypeError("First argument must be a wavelength or name")

        if resolution is not None:
            if not isinstance(resolution, (list, tuple)):
                resolution = (resolution, )
            keys = [
                k for k in keys
                if k.resolution is not None and k.resolution in resolution
            ]
        if polarization is not None:
            if not isinstance(polarization, (list, tuple)):
                polarization = (polarization, )
            keys = [
                k for k in keys if k.polarization is not None
                and k.polarization in polarization
            ]
        if calibration is not None:
            if not isinstance(calibration, (list, tuple)):
                calibration = (calibration, )
            keys = [
                k for k in keys
                if k.calibration is not None and k.calibration in calibration
            ]
        if modifiers is not None:
            keys = [
                k for k in keys
                if k.modifiers is not None and k.modifiers == modifiers
            ]

        return keys
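
A few hedged example queries against a DatasetDict populated like the one in Example #5, exercising the filters above:

d.get_keys(0.5)                           # keys whose wavelength range covers 0.5 um
d.get_keys(0.5, resolution=500)           # narrow to the 500 m entry
d.get_keys('test5', modifiers=('mod2',))  # exact modifier-tuple match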
Example #14
File: __init__.py Project: goodsonr/satpy
def filter_keys_by_dataset_id(did, key_container):
    """Filer provided key iterable by the provided `DatasetID`.

    Note: The `modifiers` attribute of `did` should be `None` to allow for
          **any** modifier in the results.

    Args:
        did (DatasetID): Query parameters to match in the `key_container`.
        key_container (iterable): Set, list, tuple, or dict of `DatasetID`
                                  keys.

    Returns (list): List of keys matching the provided parameters in no
                    specific order.

    """
    keys = iter(key_container)

    for key in DATASET_KEYS:
        if getattr(did, key) is not None:
            if key == "wavelength":
                keys = [k for k in keys
                        if (getattr(k, key) is not None and
                            DatasetID.wavelength_match(getattr(k, key),
                                                       getattr(did, key)))]
            else:
                keys = [k for k in keys
                        if getattr(k, key) is not None and getattr(k, key)
                        == getattr(did, key)]

    return keys
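
A hedged sketch of the query semantics: only the non-None attributes of `did` take part in the match, so a name-only query ignores resolution (the IDs below are hypothetical):

container = [DatasetID(name='ch01', resolution=1000),
             DatasetID(name='ch01', resolution=500),
             DatasetID(name='ch02', resolution=1000)]
query = DatasetID(name='ch01')  # wavelength, resolution, modifiers all None
print(filter_keys_by_dataset_id(query, container))  # both 'ch01' IDs, order unspecified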
Example #15
    def update_ds_ids_from_file_handlers(self):
        """Add or modify available dataset information.

        Each file handler is consulted on whether or not it can load the
        dataset with the provided information dictionary.
        See
        :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets`
        for more information.

        """
        avail_datasets = self._file_handlers_available_datasets()
        new_ids = {}
        for is_avail, ds_info in avail_datasets:
            # coordinates may come in as a list, especially from the yaml config
            coordinates = ds_info.get('coordinates')
            if isinstance(coordinates, list):
                # xarray doesn't like concatenating attributes that are
                # lists: https://github.com/pydata/xarray/issues/2060
                ds_info['coordinates'] = tuple(ds_info['coordinates'])

            ds_info.setdefault('modifiers', tuple())  # default to no mods
            ds_id = DatasetID.from_dict(ds_info)
            # all datasets
            new_ids[ds_id] = ds_info
            # available datasets
            # False == we have the file type but it doesn't have this dataset
            # None == we don't have the file type object to ask
            if is_avail:
                self.available_ids[ds_id] = ds_info
        self.all_ids = new_ids
Example #16
File: yaml_reader.py Project: zhatin/satpy
    def update_ds_ids_from_file_handlers(self):
        """Update DatasetIDs with information from loaded files.

        This is useful, for example, if dataset resolution may change
        depending on what files were loaded.

        """
        for file_handlers in self.file_handlers.values():
            fh = file_handlers[0]
            # update resolution in the dataset IDs for this file's resolution
            res = getattr(fh, 'resolution', None)
            if res is None:
                continue

            for ds_id, ds_info in list(self.ids.items()):
                file_types = ds_info['file_type']
                if not isinstance(file_types, list):
                    file_types = [file_types]
                if fh.filetype_info['file_type'] not in file_types:
                    continue
                if ds_id.resolution is not None:
                    continue
                ds_info['resolution'] = res
                new_id = DatasetID.from_dict(ds_info)
                self.ids[new_id] = ds_info
                del self.ids[ds_id]
Example #17
 def test_available_dataset_ids(self):
     """Get ids of the available datasets."""
     self.reader.file_handlers = ['ftype1']
     self.assertSetEqual(set(self.reader.available_dataset_ids),
                         {DatasetID(name='ch02',
                                    wavelength=(0.7, 0.75, 0.8),
                                    resolution=None,
                                    polarization=None,
                                    calibration='counts',
                                    modifiers=()),
                          DatasetID(name='ch01',
                                    wavelength=(0.5, 0.6, 0.7),
                                    resolution=None,
                                    polarization=None,
                                    calibration='reflectance',
                                    modifiers=())})
Example #18
 def test_init_dict(self):
     """Test DatasetDict init with a regular dict argument."""
     from satpy.dataset import DatasetID
     from satpy.readers import DatasetDict
     regular_dict = {DatasetID(name="test", wavelength=(0, 0.5, 1)): "1", }
     d = DatasetDict(regular_dict)
     self.assertEqual(d, regular_dict)
Example #19
    def test_get_dataset_qual_flags(self, *mocks):
        """Test getting the quality flags."""
        from satpy.dataset import DatasetID

        qual_flags = np.ones((3, 7))
        reader = self._get_reader_mocked()
        reader.get_qual_flags.return_value = qual_flags
        fh = self._get_fh_mocked(
            reader=reader,
            start_line=None,
            end_line=None,
            strip_invalid_coords=False,
            interpolate_coords=True
        )

        key = DatasetID('qual_flags')
        info = {'name': 'qual_flags'}
        res = fh.get_dataset(key=key, info=info)
        exp = xr.DataArray(qual_flags,
                           name=res.name,
                           dims=('y', 'num_flags'),
                           coords={'acq_time': ('y', [0, 1, 2]),
                                   'num_flags': ['Scan line number',
                                                 'Fatal error flag',
                                                 'Insufficient data for calibration',
                                                 'Insufficient data for navigation',
                                                 'Solar contamination of blackbody in channels 3',
                                                 'Solar contamination of blackbody in channels 4',
                                                 'Solar contamination of blackbody in channels 5']})
        xr.testing.assert_equal(res, exp)
Example #20
    def get_dataset(self, key, info, out=None, xslice=None, yslice=None):
        """Get the dataset designated by *key*."""
        if key.name in [
                'solar_zenith_angle', 'solar_azimuth_angle',
                'satellite_zenith_angle', 'satellite_azimuth_angle'
        ]:

            if key.name == 'solar_zenith_angle':
                var = self.sd.select('SolarZenith')
            elif key.name == 'solar_azimuth_angle':
                var = self.sd.select('SolarAzimuth')
            elif key.name == 'satellite_zenith_angle':
                var = self.sd.select('SensorZenith')
            else:  # 'satellite_azimuth_angle', guaranteed by the check above
                var = self.sd.select('SensorAzimuth')

            mask = var[:] == var._FillValue
            data = np.ma.masked_array(var[:] * var.scale_factor, mask=mask)
            return Dataset(data, id=key, **info)

        if key.name not in ['longitude', 'latitude']:
            return

        if (self.cache[key.resolution]['lons'] is None
                or self.cache[key.resolution]['lats'] is None):

            lons_id = DatasetID('longitude', resolution=key.resolution)
            lats_id = DatasetID('latitude', resolution=key.resolution)

            lons, lats = self.load([lons_id, lats_id],
                                   interpolate=False,
                                   raw=True)
            if key.resolution != self.resolution:
                from geotiepoints.geointerpolator import GeoInterpolator
                lons, lats = self._interpolate([lons, lats], self.resolution,
                                               lons_id.resolution,
                                               GeoInterpolator)
                lons = np.ma.masked_invalid(np.ascontiguousarray(lons))
                lats = np.ma.masked_invalid(np.ascontiguousarray(lats))
            self.cache[key.resolution]['lons'] = lons
            self.cache[key.resolution]['lats'] = lats

        if key.name == 'latitude':
            return Dataset(self.cache[key.resolution]['lats'], id=key, **info)
        else:
            return Dataset(self.cache[key.resolution]['lons'], id=key, **info)
Example #21
 def test_get_area_def(self):
     """Test getting the area def."""
     area = self.reader.get_area_def(DatasetID('HRV'))
     self.assertEqual(area.area_extent,
                      (-45561979844414.07, -3720765401003.719, 45602912357076.38, 77771774058.38356))
     self.assertEqual(area.proj_dict, {'a': 6378169.0,
                                       'b': 6356583.8,
                                       'h': 35785831.0,
                                       'lon_0': 44,
                                       'proj': 'geos',
                                       'units': 'm'})
     self.reader.fill_hrv = False
     area = self.reader.get_area_def(DatasetID('HRV'))
     self.assertEqual(area.defs[0].area_extent,
                      (-22017598561055.01, -2926674655354.9604, 23564847539690.22, 77771774058.38356))
     self.assertEqual(area.defs[1].area_extent,
                      (-30793529275853.656, -3720765401003.719, 14788916824891.568, -2926674655354.9604))
Example #22
 def test_available_dataset_ids(self):
     """Get ids of the available datasets."""
     loadables = self.reader.select_files_from_pathnames(['a001.bla'])
     self.reader.create_filehandlers(loadables)
     self.assertSetEqual(set(self.reader.available_dataset_ids),
                         {DatasetID(name='ch02',
                                    wavelength=(0.7, 0.75, 0.8),
                                    resolution=None,
                                    polarization=None,
                                    calibration='counts',
                                    modifiers=()),
                          DatasetID(name='ch01',
                                    wavelength=(0.5, 0.6, 0.7),
                                    resolution=None,
                                    polarization=None,
                                    calibration='reflectance',
                                    modifiers=())})
Example #23
File: scene.py Project: zhangqrl/satpy
    def _generate_composite(self, comp_node, keepables):
        """Collect all composite prereqs and create the specified composite.

        Args:
            comp_node (Node): Composite Node to generate a Dataset for
            keepables (set): `set` to update if any datasets are needed
                             when generation is continued later. This can
                             happen if generation is delayed to incompatible
                             areas which would require resampling first.

        """
        if comp_node.name in self.datasets:
            # already loaded
            return
        compositor, prereqs, optional_prereqs = comp_node.data

        try:
            prereq_datasets = self._get_prereq_datasets(
                comp_node.name,
                prereqs,
                keepables,
            )
        except KeyError:
            return

        optional_datasets = self._get_prereq_datasets(
            comp_node.name,
            optional_prereqs,
            keepables,
            skip=True
        )

        try:
            composite = compositor(prereq_datasets,
                                   optional_datasets=optional_datasets,
                                   **self.attrs)

            cid = DatasetID.from_dict(composite.attrs)

            self.datasets[cid] = composite
            # update the node with the computed DatasetID
            if comp_node.name in self.wishlist:
                self.wishlist.remove(comp_node.name)
                self.wishlist.add(cid)
            comp_node.name = cid
        except IncompatibleAreas:
            LOG.warning("Delaying generation of %s "
                        "because of incompatible areas",
                        str(compositor.id))
            preservable_datasets = set(self.datasets.keys())
            prereq_ids = set(p.name for p in prereqs)
            opt_prereq_ids = set(p.name for p in optional_prereqs)
            keepables |= preservable_datasets & (prereq_ids | opt_prereq_ids)
            # even though it wasn't generated keep a list of what
            # might be needed in other compositors
            keepables.add(comp_node.name)
            return
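
From the user's side this machinery runs inside Scene.load; a hedged end-to-end sketch (the reader and composite names exist in satpy, the file list is hypothetical):

from satpy import Scene
scn = Scene(reader='seviri_l1b_hrit', filenames=hrit_files)  # hrit_files: hypothetical
scn.load(['natural_color'])  # builds the dependency tree, then _generate_composite
                             # stores the result under its computed DatasetID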
Example #24
 def test_get_coordinates_for_dataset_key_without(self):
     """Test getting coordinates for a key without coordinates."""
     ds_id = DatasetID(name='lons',
                       wavelength=None,
                       resolution=None,
                       polarization=None,
                       calibration=None,
                       modifiers=())
     res = self.reader._get_coordinates_for_dataset_key(ds_id)
     self.assertListEqual(res, [])
Example #25
    def test_get_area_def(self):
        """Test getting the area def."""
        area = self.reader.get_area_def(DatasetID('VIS006'))
        self.assertEqual(area.proj_dict, {'a': 6378169.0,
                                          'b': 6356583.8,
                                          'h': 35785831.0,
                                          'lon_0': 44.0,
                                          'proj': 'geos',
                                          'units': 'm'})
        self.assertEqual(area.area_extent,
                         (-77771774058.38356, -3720765401003.719,
                          30310525626438.438, 77771774058.38356))

        # Data shifted by 1.5km to N-W
        self.reader.mda['offset_corrected'] = False
        area = self.reader.get_area_def(DatasetID('VIS006'))
        self.assertEqual(area.area_extent,
                         (-77771772558.38356, -3720765402503.719,
                          30310525627938.438, 77771772558.38356))
Example #26
 def test_get_coordinates_for_dataset_key(self):
     """Test getting coordinates for a key."""
     ds_id = DatasetID(name='ch01', wavelength=(0.5, 0.6, 0.7),
                       resolution=None, polarization=None,
                       calibration='reflectance', modifiers=())
     res = self.reader._get_coordinates_for_dataset_key(ds_id)
     self.assertListEqual(res,
                          [DatasetID(name='lons',
                                     wavelength=None,
                                     resolution=None,
                                     polarization=None,
                                     calibration=None,
                                     modifiers=()),
                           DatasetID(name='lats',
                                     wavelength=None,
                                     resolution=None,
                                     polarization=None,
                                     calibration=None,
                                     modifiers=())])
Example #27
    def test_get_dataset_channels(self, get_channel, *mocks):
        """Test getting the channel datasets."""
        from satpy.dataset import DatasetID

        # Mock reader and file handler
        fh = self._get_fh_mocked(
            reader=self._get_reader_mocked(),
            chn_dict={'1': 0, '5': 0},
            start_line=None,
            end_line=None,
            strip_invalid_coords=False,
            filename_info={'orbit_number': 123},
            sensor='sensor',
        )

        # Test calibration to reflectance as well as attributes.
        counts = np.ones((3, 3))
        get_channel.return_value = counts
        key = DatasetID('1', calibration='reflectance')
        info = {'name': '1', 'standard_name': 'my_standard_name'}

        res = fh.get_dataset(key=key, info=info)
        exp = xr.DataArray(da.ones((3, 3)),
                           name=res.name,
                           dims=('y', 'x'),
                           coords={'acq_time': ('y', [0, 1, 2])},
                           attrs={'name': '1',
                                  'platform_name': 'spacecraft_name',
                                  'orbit_number': 123,
                                  'sensor': 'sensor',
                                  'orbital_parameters': {'tle': 'tle'},
                                  'midnight_scanline': 1,
                                  'missing_scanlines': [1, 2, 3],
                                  'foo': 'bar',
                                  'standard_name': 'my_standard_name'})
        exp.coords['acq_time'].attrs['long_name'] = 'Mean scanline acquisition time'
        xr.testing.assert_identical(res, exp)
        get_channel.assert_called_with(key)

        # Counts & brightness temperature: Similar, just check _get_channel() call
        for key in [DatasetID('1', calibration='counts'),
                    DatasetID('5', calibration='brightness_temperature')]:
            fh.get_dataset(key=key, info={'name': key.name})
            get_channel.assert_called_with(key)
Example #28
    def test_get_channel(self):
        """Test getting the channels."""
        from satpy.dataset import DatasetID

        counts = np.moveaxis(np.array([[[1, 2, 3],
                                        [4, 5, 6]]]), 0, 2)
        calib_channels = 2 * counts
        reader = self._get_reader_mocked()
        reader.get_counts.return_value = counts
        reader.get_calibrated_channels.return_value = calib_channels
        fh = self._get_fh_mocked(reader=reader, counts=None, calib_channels=None,
                                 chn_dict={'1': 0})

        key = DatasetID('1', calibration='counts')
        # Counts
        res = fh._get_channel(key=key)
        np.testing.assert_array_equal(res, [[1, 2, 3],
                                            [4, 5, 6]])
        np.testing.assert_array_equal(fh.counts, counts)

        # Reflectance and Brightness Temperature
        for calib in ['reflectance', 'brightness_temperature']:
            key = DatasetID('1', calibration=calib)
            res = fh._get_channel(key=key)
            np.testing.assert_array_equal(res, [[2, 4, 6],
                                                [8, 10, 12]])
            np.testing.assert_array_equal(fh.calib_channels, calib_channels)

        # Invalid
        key = DatasetID('7', calibration='coffee')
        self.assertRaises(ValueError, fh._get_channel, key=key)

        # Buffering
        reader.get_counts.reset_mock()
        key = DatasetID('1', calibration='counts')
        fh._get_channel(key=key)
        reader.get_counts.assert_not_called()

        reader.get_calibrated_channels.reset_mock()
        for calib in ['reflectance', 'brightness_temperature']:
            key = DatasetID('1', calibration=calib)
            fh._get_channel(key)
            reader.get_calibrated_channels.assert_not_called()
Example #29
    def test_get_area_def(self):
        """Test getting the area def."""
        area = self.reader.get_area_def(DatasetID('VIS006'))
        proj_dict = area.proj_dict
        self.assertEqual(proj_dict['a'], 6378169.0)
        self.assertEqual(proj_dict['b'], 6356583.8)
        self.assertEqual(proj_dict['h'], 35785831.0)
        self.assertEqual(proj_dict['lon_0'], 44.0)
        self.assertEqual(proj_dict['proj'], 'geos')
        self.assertEqual(proj_dict['units'], 'm')
        self.assertEqual(area.area_extent,
                         (-77771774058.38356, -3720765401003.719,
                          30310525626438.438, 77771774058.38356))

        # Data shifted by 1.5km to N-W
        self.reader.mda['offset_corrected'] = False
        area = self.reader.get_area_def(DatasetID('VIS006'))
        self.assertEqual(area.area_extent,
                         (-77771772558.38356, -3720765402503.719,
                          30310525627938.438, 77771772558.38356))
Example #30
    def test_get_datasets_by_id(self):
        """Check getting datasets by id."""
        from satpy.dataset import DatasetID
        dsid = DatasetID('ch01')
        res = self.reader.get_ds_ids_by_id(dsid)
        self.assertEqual(len(res), 1)
        self.assertEqual(res[0].name, 'ch01')

        dsid = DatasetID(wavelength=.6)
        res = self.reader.get_ds_ids_by_id(dsid)
        self.assertEqual(len(res), 1)
        self.assertEqual(res[0].name, 'ch01')

        dsid = DatasetID('ch01', .6)
        res = self.reader.get_ds_ids_by_id(dsid)
        self.assertEqual(len(res), 1)
        self.assertEqual(res[0].name, 'ch01')

        dsid = DatasetID('ch01', .1)
        self.assertRaises(KeyError, self.reader.get_ds_ids_by_id, dsid)
Example #31
    def _process_composite_config(self, composite_name, conf,
                                  composite_type, sensor_id, composite_config, **kwargs):

        compositors = self.compositors[sensor_id]
        modifiers = self.modifiers[sensor_id]

        options = {}
        try:
            options = conf[composite_type][composite_name]
            loader = options.pop('compositor')
        except KeyError:
            if composite_name in compositors or composite_name in modifiers:
                return conf
            # pre-binding `options` above avoids a NameError in the message
            # below when the composite entry itself is missing from the config
            raise ValueError("'compositor' missing or empty in {0}. Option keys = {1}".format(
                composite_config, str(options.keys())))

        options['name'] = composite_name
        for prereq_type in ['prerequisites', 'optional_prerequisites']:
            prereqs = []
            for item in options.get(prereq_type, []):
                if isinstance(item, dict):
                    # we want this prerequisite to act as a query with
                    # 'modifiers' being None otherwise it will be an empty
                    # tuple
                    item.setdefault('modifiers', None)
                    key = DatasetID.from_dict(item)
                    prereqs.append(key)
                else:
                    prereqs.append(item)
            options[prereq_type] = prereqs

        if composite_type == 'composites':
            options.update(**kwargs)
            key = DatasetID.from_dict(options)
            comp = loader(**options)
            compositors[key] = comp
        elif composite_type == 'modifiers':
            modifiers[composite_name] = loader, options
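
For orientation, a hedged sketch of the conf structure this method consumes, written as the Python dict a composites YAML file would load to (the composite and channel names are made up):

from satpy.composites import GenericCompositor

conf = {
    'composites': {
        'my_composite': {
            'compositor': GenericCompositor,  # popped off as `loader` above
            'prerequisites': [
                0.6,                                                    # match by wavelength
                {'name': 'ch02', 'modifiers': ['rayleigh_corrected']},  # becomes a DatasetID query
            ],
        },
    },
}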
Example #32
    def update_ds_ids_from_file_handlers(self):
        """Update DatasetIDs with information from loaded files.

        This is useful, for example, if dataset resolution may change
        depending on what files were loaded.

        """
        for file_handlers in self.file_handlers.values():
            fh = file_handlers[0]
            # update resolution in the dataset IDs for this file's resolution
            res = getattr(fh, 'resolution', None)
            if res is None:
                continue

            for ds_id, ds_info in list(self.ids.items()):
                if fh.filetype_info['file_type'] != ds_info['file_type']:
                    continue
                if ds_id.resolution is not None:
                    continue
                ds_info['resolution'] = res
                new_id = DatasetID.from_dict(ds_info)
                self.ids[new_id] = ds_info
                del self.ids[ds_id]