Example #1
    def get_dataset(self, key, info):
        """Get a dataset from the file."""

        if key.name in CHANNEL_NAMES:
            dataset = self.calibrate([key])[0]
            # dataset.info.update(info)
        elif key.name in ['longitude', 'latitude']:
            if self.lons is None or self.lats is None:
                self.navigate()
            if key.name == 'longitude':
                return Dataset(self.lons, id=key, **info)
            else:
                return Dataset(self.lats, id=key, **info)
        else:  # Get sun-sat angles
            if key.name in ANGLES:
                if isinstance(getattr(self, ANGLES[key.name]), np.ndarray):
                    dataset = Dataset(
                        getattr(self, ANGLES[key.name]),
                        copy=False)
                else:
                    dataset = self.get_angles(key.name)
            else:
                logger.error(
                    "Not a supported sun-sensor viewing angle: %s", key.name)
                raise ValueError(
                    "Not a supported sun-sensor viewing angle: %s" % key.name)

        # TODO get metadata

        if not self._shape:
            self._shape = dataset.shape

        return dataset
Example #2
    def get_dataset(self, key, info):
        if self._data is None:
            self.read()

        if key.name in ['latitude', 'longitude']:
            lons, lats = self.get_lonlats()
            if key.name == 'latitude':
                return Dataset(lats, id=key)
            else:
                return Dataset(lons, id=key)

        avhrr_channel_index = {
            '1': 0,
            '2': 1,
            '3a': 2,
            '3b': 2,
            '4': 3,
            '5': 4
        }
        index = avhrr_channel_index[key.name]
        mask = False
        if key.name in ['3a', '3b'] and self._is3b is None:
            ch3a = bfield(self._data["id"]["id"], 10)
            self._is3b = np.logical_not(ch3a)

        if key.name == '3a':
            mask = np.tile(self._is3b, (1, 2048))
        elif key.name == '3b':
            mask = np.tile(np.logical_not(self._is3b), (1, 2048))

        data = self._data["image_data"][:, :, index]
        if key.calibration == 'counts':
            return Dataset(data, mask=mask, area=self.get_lonlats(), units='1')

        pg_spacecraft = ''.join(self.platform_name.split()).lower()

        jdays = (np.datetime64(self.start_time) -
                 np.datetime64(str(self.year) +
                               '-01-01T00:00:00Z')) / np.timedelta64(1, 'D')
        if index < 2 or key.name == '3a':
            data = calibrate_solar(data, index, self.year, jdays,
                                   pg_spacecraft)
            units = '%'

        if index > 2 or key.name == '3b':
            if self.times is None:
                self.times = time_seconds(self._data["timecode"], self.year)
            line_numbers = (np.round(
                (self.times - self.times[-1]) /
                np.timedelta64(166666667, 'ns'))).astype(int)
            line_numbers -= line_numbers[0]
            if self.prt is None:
                self.prt, self.ict, self.space = self.get_telemetry()
            chan = index + 1
            data = calibrate_thermal(data, self.prt, self.ict[:, chan - 3],
                                     self.space[:, chan - 3], line_numbers,
                                     chan, pg_spacecraft)
            units = 'K'
        # TODO: check if entirely masked before returning
        return Dataset(data, mask=mask, units=units)
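
The solar calibration above needs the fractional day of year (jdays). A minimal sketch of that computation with plain numpy datetime arithmetic, using made-up values in place of self.start_time and self.year:

import numpy as np

# Hypothetical stand-ins for self.start_time and self.year.
start_time = np.datetime64("2009-06-15T12:30:00")
year = 2009

# Fractional days since the start of the year, as passed to calibrate_solar.
jdays = (start_time - np.datetime64(str(year) + "-01-01T00:00:00")) / \
    np.timedelta64(1, "D")
print(jdays)  # ~165.52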
Example #3
    def get_dataset(self, key, info):

        if self.reader is None:

            with open(self.filename) as fdes:
                data = fdes.read(3)
            if data in ["CMS", "NSS", "UKM", "DSS"]:
                reader = GACKLMReader
                self.chn_dict = AVHRR3_CHANNEL_NAMES
            else:
                reader = GACPODReader
                self.chn_dict = AVHRR_CHANNEL_NAMES

            self.reader = reader()
            self.reader.read(self.filename)

        if key.name in ['latitude', 'longitude']:
            if self.reader.lons is None or self.reader.lats is None:
                self.reader.get_lonlat(clock_drift_adjust=False)
            if key.name == 'latitude':
                return Dataset(self.reader.lats, id=key, **info)
            else:
                return Dataset(self.reader.lons, id=key, **info)

        if self.channels is None:
            self.channels = self.reader.get_calibrated_channels()

        data = self.channels[:, :, self.chn_dict[key.name]]
        return Dataset(data, id=key, **info)
Example #4
    def __call__(self, projectables, **info):
        if len(projectables) != 2:
            raise ValueError("Expected 2 datasets, got %d" %
                             (len(projectables), ))

        # TODO: support datasets with palette to delegate this to the image
        # writer.

        data, palette = projectables
        palette = palette / 255.0

        from trollimage.colormap import Colormap
        if data.dtype == np.dtype('uint8'):
            tups = [(val, tuple(tup))
                    for (val, tup) in enumerate(palette[:-1])]
            colormap = Colormap(*tups)
        elif 'valid_range' in data.info:
            tups = [(val, tuple(tup))
                    for (val, tup) in enumerate(palette[:-1])]
            colormap = Colormap(*tups)
            colormap.set_range(*data.info['valid_range'])
        else:
            raise ValueError(
                "Cannot colorize the data: expected a uint8 dataset or a "
                "'valid_range' in the dataset info")
        r, g, b = colormap.colorize(data)
        r[data.mask] = palette[-1][0]
        g[data.mask] = palette[-1][1]
        b[data.mask] = palette[-1][2]
        r = Dataset(r, copy=False, mask=data.mask, **data.info)
        g = Dataset(g, copy=False, mask=data.mask, **data.info)
        b = Dataset(b, copy=False, mask=data.mask, **data.info)

        return super(PaletteCompositor, self).__call__((r, g, b), **data.info)
Example #5
    def __call__(self, projectables, *args, **kwargs):
        try:

            vis06 = projectables[0]
            vis08 = projectables[1]
            hrv = projectables[2]

            ndvi = (vis08 - vis06) / (vis08 + vis06)
            ndvi = np.where(ndvi < 0, 0, ndvi)

            # info = combine_info(*projectables)
            # info['name'] = self.info['name']
            # info['standard_name'] = self.info['standard_name']

            ch1 = Dataset(ndvi * vis06 + (1 - ndvi) * vis08,
                          copy=False,
                          **vis06.info)
            ch2 = Dataset(ndvi * vis08 + (1 - ndvi) * vis06,
                          copy=False,
                          **vis08.info)
            ch3 = Dataset(3 * hrv - vis06 - vis08, copy=False, **hrv.info)

            res = RGBCompositor.__call__(self, (ch1, ch2, ch3), *args,
                                         **kwargs)
        except ValueError:
            raise IncompatibleAreas
        return res
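
A minimal numpy sketch (with made-up reflectances) of the NDVI weighting that the composite above uses to blend VIS06 and VIS08:

import numpy as np

# Hypothetical VIS06 / VIS08 reflectances.
vis06 = np.array([[10., 20.], [30., 40.]])
vis08 = np.array([[15., 18.], [45., 35.]])

# NDVI, with negative values set to zero, then used as the blending weight.
ndvi = (vis08 - vis06) / (vis08 + vis06)
ndvi = np.where(ndvi < 0, 0, ndvi)
ch1 = ndvi * vis06 + (1 - ndvi) * vis08
print(ndvi.round(2))  # [[0.2 0. ] [0.2 0. ]]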
Example #6
def step_impl(context):
    """
    :type context: behave.runner.Context
    """
    from satpy import Scene
    from satpy.dataset import Dataset
    scn = Scene()
    scn["MyDataset"] = Dataset([[1, 2], [3, 4]])
    scn["MyDataset2"] = Dataset([[5, 6], [7, 8]])
    context.scene = scn
Example #7
def stack(datasets):
    """First dataset at the bottom."""

    base = Dataset(datasets[0], copy=True)
    for dataset in datasets[1:]:
        base_mask = np.ma.getmaskarray(base)
        other_mask = np.ma.getmaskarray(dataset)
        base.mask = np.logical_and(base_mask, other_mask)
        not_masked = np.logical_not(other_mask)
        base[not_masked] = dataset[not_masked]

    return base
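
A minimal sketch of the layering behaviour, using plain numpy masked arrays as stand-ins for Dataset (which the masking calls above treat as a masked array): each later array overwrites the base wherever it is unmasked.

import numpy as np

bottom = np.ma.masked_invalid([[1.0, np.nan], [3.0, 4.0]])
top = np.ma.masked_invalid([[np.nan, 20.0], [30.0, np.nan]])

base = bottom.copy()
other_mask = np.ma.getmaskarray(top)
# A pixel stays masked only if it is masked in both layers.
base.mask = np.logical_and(np.ma.getmaskarray(base), other_mask)
# The later layer overwrites the base wherever it has valid data.
base[~other_mask] = top[~other_mask]
print(base)  # [[1.0 20.0] [30.0 4.0]], nothing masked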
Example #8
def step_impl(context):
    """Datasets with the same name but different other ID parameters"""
    from satpy import Scene
    from satpy.dataset import Dataset, DatasetID
    scn = Scene()
    scn[DatasetID('ds1', calibration='radiance')] = Dataset([[1, 2], [3, 4]])
    scn[DatasetID('ds1', resolution=500, calibration='reflectance')] = Dataset([[5, 6], [7, 8]])
    scn[DatasetID('ds1', resolution=250, calibration='reflectance')] = Dataset([[5, 6], [7, 8]])
    scn[DatasetID('ds1', resolution=1000, calibration='reflectance')] = Dataset([[5, 6], [7, 8]])
    scn[DatasetID('ds1', resolution=500, calibration='radiance', modifiers=('mod1',))] = Dataset([[5, 6], [7, 8]])
    scn[DatasetID('ds1', resolution=1000, calibration='radiance', modifiers=('mod1', 'mod2'))] = Dataset([[5, 6], [7, 8]])
    context.scene = scn
Example #9
def step_impl(context):
    """
    :type context: behave.runner.Context
    """
    from satpy.scene import Scene
    from datetime import datetime
    from satpy.dataset import Dataset
    scn = Scene(platform_name="Suomi-NPP", sensor="viirs",
                start_time=datetime(2015, 3, 11, 11, 20),
                end_time=datetime(2015, 3, 11, 11, 26))
    scn["MyDataset"] = Dataset([[1, 2], [3, 4]])
    scn["MyDataset2"] = Dataset([[5, 6], [7, 8]])
    context.scene = scn
Example #10
    def __call__(self, projectables, optional_datasets=None, **info):
        """Get the reflectance part of an NIR channel. Not supposed to be used
        for wavelength outside [3, 4] µm.
        """
        self._init_refl3x(projectables)
        _nir, _ = projectables
        proj = Dataset(
            self._get_reflectance(projectables, optional_datasets) * 100,
            **_nir.info)

        proj.info['units'] = '%'
        self.apply_modifier_info(_nir, proj)

        return proj
Example #11
    def get_dataset(self,
                    key,
                    info,
                    out=None,
                    xslice=slice(None),
                    yslice=slice(None)):

        if key.name not in self.channel_order_list:
            raise KeyError('Channel %s not available in the file' % key.name)
        elif key.name not in ['HRV']:
            ch_idn = self.channel_order_list.index(key.name)
            data = dec10216(
                self.memmap['visir']['line_data'][:, ch_idn, :])[::-1, ::-1]

            data = np.ma.masked_array(data, mask=(data == 0))
            res = Dataset(data, dtype=np.float32)
        else:
            data2 = dec10216(self.memmap["hrv"]["line_data"][:,
                                                             2, :])[::-1, ::-1]
            data1 = dec10216(self.memmap["hrv"]["line_data"][:,
                                                             1, :])[::-1, ::-1]
            data0 = dec10216(self.memmap["hrv"]["line_data"][:,
                                                             0, :])[::-1, ::-1]
            # Make empty array:
            shape = data0.shape[0] * 3, data0.shape[1]
            data = np.zeros(shape)
            idx = range(0, shape[0], 3)
            data[idx, :] = data2
            idx = range(1, shape[0], 3)
            data[idx, :] = data1
            idx = range(2, shape[0], 3)
            data[idx, :] = data0

            data = np.ma.masked_array(data, mask=(data == 0))
            res = Dataset(data, dtype=np.float32)

        out = res

        self.calibrate(out, key)
        out.info['units'] = info['units']
        out.info['wavelength'] = info['wavelength']
        out.info['standard_name'] = info['standard_name']
        out.info['platform_name'] = self.platform_name
        out.info['sensor'] = 'seviri'

        return out
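
The HRV branch above interleaves three sub-line arrays row-wise. A small self-contained sketch of that interleaving with toy arrays:

import numpy as np

# Toy stand-ins for the three decoded HRV line arrays (data2, data1, data0).
data2 = np.full((2, 4), 2)
data1 = np.full((2, 4), 1)
data0 = np.full((2, 4), 0)

# Every group of three output rows takes one row from each input array.
shape = data0.shape[0] * 3, data0.shape[1]
data = np.zeros(shape, dtype=data0.dtype)
data[0::3, :] = data2
data[1::3, :] = data1
data[2::3, :] = data0
print(data[:, 0])  # [2 1 0 2 1 0]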
Example #12
    def __call__(self, projectables, optional_datasets=None, **info):
        """Get the atmospherical correction. Uses pyspectral.
        """
        from pyspectral.atm_correction_ir import AtmosphericalCorrection

        band = projectables[0]

        if optional_datasets:
            satz = optional_datasets[0]
        else:
            from pyorbital.orbital import get_observer_look
            lons, lats = band.info['area'].get_lonlats()

            try:
                dummy, satel = get_observer_look(
                    band.info['satellite_longitude'],
                    band.info['satellite_latitude'],
                    band.info['satellite_altitude'], band.info['start_time'],
                    lons, lats, 0)
            except KeyError:
                raise KeyError('Band info is missing some meta data!')
            satz = 90 - satel
            del satel

        LOG.info('Correction for limb cooling')
        corrector = AtmosphericalCorrection(band.info['platform_name'],
                                            band.info['sensor'])

        atm_corr = corrector.get_correction(satz, band.info['name'], band)

        proj = Dataset(atm_corr, copy=False, **band.info)
        self.apply_modifier_info(band, proj)

        return proj
Example #13
    def get_dataset(self, dsid, info, out=None):
        """Load a dataset."""

        logger.debug('Reading %s.', dsid.name)
        variable = self.nc[dsid.name]

        info = {'platform_name': self.platform_name, 'sensor': self.sensor}

        try:
            values = np.ma.masked_equal(variable[:],
                                        variable.attrs['_FillValue'],
                                        copy=False)
        except KeyError:
            values = np.ma.array(variable[:], copy=False)
        if 'scale_factor' in variable.attrs:
            values = values * variable.attrs['scale_factor']
            info['scale_factor'] = variable.attrs['scale_factor']
        if 'add_offset' in variable.attrs:
            values = values + variable.attrs['add_offset']
            info['add_offset'] = variable.attrs['add_offset']
        if 'valid_range' in variable.attrs:
            info['valid_range'] = variable.attrs['valid_range']
        if 'units' in variable.attrs:
            info['units'] = variable.attrs['units']
        if 'standard_name' in variable.attrs:
            info['standard_name'] = variable.attrs['standard_name']

        if self.pps and dsid.name == 'ctth_alti':
            info['valid_range'] = (0., 8500.)
        if self.pps and dsid.name == 'ctth_alti_pal':
            values = values[1:, :]

        proj = Dataset(np.squeeze(values), copy=False, **info)
        return proj
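
A minimal sketch of the CF-style unpacking pattern used above (mask _FillValue, then apply scale_factor and add_offset), with hypothetical attribute values:

import numpy as np

# Hypothetical packed variable with CF-style attributes.
raw = np.array([[0, 120, 65535], [200, 65535, 50]], dtype=np.uint16)
attrs = {"_FillValue": 65535, "scale_factor": 0.01, "add_offset": 273.15}

# Mask the fill value first, then unpack: value * scale_factor + add_offset.
values = np.ma.masked_equal(raw, attrs["_FillValue"], copy=False)
values = values * attrs["scale_factor"] + attrs["add_offset"]
print(values)  # [[273.15 274.35 --] [275.15 -- 273.65]]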
Example #14
    def get_dataset(self, key, info, out=None):
        """Get a dataset from the file."""

        logger.debug("Reading %s.", key.name)
        values = self.file_content[key.name]
        selected = np.array(self.selected)
        if key.name in ("Latitude", "Longitude"):
            values = values / 10000.
        if key.name in ('Tsurf', 'CloudTopPres', 'CloudTopTemp'):
            goods = values > -9998.
            selected = np.array(selected & goods)
            if key.name in ('Tsurf', "Alt_surface", "CloudTopTemp"):
                values = values / 100.
            if key.name in ("CloudTopPres"):
                values = values / 10.
        else:
            selected = self.selected
        mask_values = np.ma.masked_array(values, mask=~selected)

        # update dataset info with file_info
        for k, v in self.finfo.items():
            info[k] = v

        ds = Dataset(mask_values, copy=False, **info)
        return ds
Example #15
    def get_dataset(self, key, info):
        """Load a dataset."""
        logger.debug('Reading %s.', key.name)
        variable = self.nc[key.name]

        info = {'platform_name': self.platform_name,
                'sensor': self.sensor}

        try:
            values = np.ma.masked_equal(variable[:],
                                        variable.attrs['_FillValue'], copy=False)
        except KeyError:
            values = np.ma.array(variable[:], copy=False)
        if 'scale_factor' in variable.attrs:
            values = values * variable.attrs['scale_factor']
            info['scale_factor'] = variable.attrs['scale_factor']
        if 'add_offset' in variable.attrs:
            values = values + variable.attrs['add_offset']
            info['add_offset'] = variable.attrs['add_offset']

#        info = {'platform_name': self.platform_name,
#                'sensor': self.sensor}

        if 'valid_range' in variable.attrs:
            info['valid_range'] = variable.attrs['valid_range']
        if 'units' in variable.attrs:
            info['units'] = variable.attrs['units']

        proj = Dataset(values,
                       copy=False,
                       **info)
        return proj
Example #16
File: nc_olci.py  Project: nguyentt85/satpy
    def get_dataset(self, key, info):
        """Load a dataset
        """
        if self.channel != key.name:
            return
        logger.debug('Reading %s.', key.name)
        variable = self.nc[self.channel + '_radiance']

        radiances = (np.ma.masked_equal(variable[:],
                                        variable.attrs['_FillValue'], copy=False) *
                     variable.attrs['scale_factor'] +
                     variable.attrs['add_offset'])
        units = variable.attrs['units']
        if key.calibration == 'reflectance':
            solar_flux = self.cal['solar_flux'][:]
            d_index = np.ma.masked_equal(self.cal['detector_index'][:],
                                         self.cal['detector_index'].attrs[
                                             '_FillValue'],
                                         copy=False)
            idx = int(key.name[2:]) - 1
            radiances /= solar_flux[idx, d_index]
            radiances *= np.pi * 100
            units = '%'

        proj = Dataset(radiances,
                       copy=False,
                       units=units,
                       platform_name=self.platform_name,
                       sensor=self.sensor)
        proj.info.update(key.to_dict())
        return proj
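
The reflectance branch above divides each radiance by the solar flux of the detector that observed the pixel, then scales by pi * 100. A simplified, self-contained sketch with hypothetical values (one flux per detector instead of per band and detector):

import numpy as np

radiances = np.ma.array([[50., 60.], [70., 80.]])
solar_flux = np.array([1500., 1510., 1520.])   # hypothetical, one value per detector
d_index = np.array([[0, 1], [2, 0]])           # detector index of each pixel

# Per-pixel normalisation by solar flux, then conversion to percent.
refl = radiances / solar_flux[d_index] * np.pi * 100
print(refl.round(2))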
Example #17
    def get_angles(self, angle_id):
        """Get sun-satellite viewing angles"""

        tic = datetime.now()

        sunz40km = self._data["ang"][:, :, 0] * 1e-2
        satz40km = self._data["ang"][:, :, 1] * 1e-2
        azidiff40km = self._data["ang"][:, :, 2] * 1e-2

        try:
            from geotiepoints.interpolator import Interpolator
        except ImportError:
            logger.warning("Could not interpolate sun-sat angles, "
                           "python-geotiepoints missing.")
            self.sunz, self.satz, self.azidiff = sunz40km, satz40km, azidiff40km
        else:
            cols40km = np.arange(24, 2048, 40)
            cols1km = np.arange(2048)
            lines = sunz40km.shape[0]
            rows40km = np.arange(lines)
            rows1km = np.arange(lines)

            along_track_order = 1
            cross_track_order = 3

            satint = Interpolator(
                [sunz40km, satz40km, azidiff40km], (rows40km, cols40km),
                (rows1km, cols1km), along_track_order, cross_track_order)
            self.sunz, self.satz, self.azidiff = satint.interpolate()

            logger.debug("Interpolate sun-sat angles: time %s",
                         str(datetime.now() - tic))

        return Dataset(getattr(self, ANGLES[angle_id]), copy=False)
Example #18
    def get_dataset(self, key, info=None):
        """Load a dataset
        """
        if key in self.cache:
            return self.cache[key]

        logger.debug('Reading %s.', key.name)
        measured = self.nc['/data/%s/measured' % key.name]
        variable = self.nc['/data/%s/measured/effective_radiance' % key.name]

        # Get start/end line and column of loaded swath.

        self.startline = int(measured.variables['start_position_row'][...])
        self.endline = int(measured.variables['end_position_row'][...])
        self.startcol = int(measured.variables['start_position_column'][...])
        self.endcol = int(measured.variables['end_position_column'][...])

        ds = (np.ma.masked_equal(variable[:], variable.attrs['_FillValue']) *
              (variable.attrs['scale_factor'] * 1.0) +
              variable.attrs.get('add_offset', 0))

        self.calibrate(ds, key)

        out = Dataset(ds, dtype=np.float32)

        self.cache[key] = out
        self.nlines, self.ncols = ds.shape

        return out
Example #19
    def __call__(self, projectables, nonprojectables=None, **info):
        if len(projectables) != 3:
            raise ValueError("Expected 3 datasets, got %d" %
                             (len(projectables), ))
        try:
            the_data = np.rollaxis(
                np.ma.dstack([projectable for projectable in projectables]),
                axis=2)
        except ValueError:
            raise IncompatibleAreas
        # info = projectables[0].info.copy()
        # info.update(projectables[1].info)
        # info.update(projectables[2].info)
        info = combine_info(*projectables)
        info.update(self.info)
        # FIXME: should this be done here ?
        info["wavelength"] = None
        info.pop("units", None)
        sensor = set()
        for projectable in projectables:
            current_sensor = projectable.info.get("sensor", None)
            if current_sensor:
                if isinstance(current_sensor, (str, bytes, six.text_type)):
                    sensor.add(current_sensor)
                else:
                    sensor |= current_sensor
        if len(sensor) == 0:
            sensor = None
        elif len(sensor) == 1:
            sensor = list(sensor)[0]
        info["sensor"] = sensor
        info["mode"] = "RGB"
        return Dataset(data=the_data, **info)
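
The dstack/rollaxis call above produces a (bands, rows, cols) cube while carrying the masks along. A minimal sketch with made-up single-band arrays:

import numpy as np

r = np.ma.masked_less(np.array([[0.1, 0.2], [0.3, 0.4]]), 0.15)
g = np.ma.array([[0.5, 0.6], [0.7, 0.8]])
b = np.ma.array([[0.9, 1.0], [1.1, 1.2]])

# Stack the bands along a new leading axis: shape becomes (3, rows, cols).
cube = np.rollaxis(np.ma.dstack([r, g, b]), axis=2)
print(cube.shape)          # (3, 2, 2)
print(cube[0].mask[0, 0])  # True: the mask of the red band is preserved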
Example #20
    def __call__(self, projectables, optional_datasets=None, **info):
        """CO2 correction of the brightness temperature of the MSG 3.9um
        channel.

        .. math::

          T4_{CO2corr} = (BT(IR3.9)^4 + R_{corr})^{0.25}

          R_{corr} = BT(IR10.8)^4 - (BT(IR10.8) - dt_{CO2})^4

          dt_{CO2} = \frac{BT(IR10.8) - BT(IR13.4)}{4.0}
        """
        (ir_039, ir_108, ir_134) = projectables
        LOG.info('Applying CO2 correction')
        dt_co2 = (ir_108 - ir_134) / 4.0
        rcorr = ir_108**4 - (ir_108 - dt_co2)**4
        t4_co2corr = ir_039**4 + rcorr
        t4_co2corr = np.ma.where(t4_co2corr > 0.0, t4_co2corr, 0)
        t4_co2corr = t4_co2corr**0.25

        info = ir_039.info.copy()

        proj = Dataset(t4_co2corr, mask=t4_co2corr.mask, **info)

        self.apply_modifier_info(ir_039, proj)

        return proj
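
A scalar sketch of the correction formula from the docstring, with made-up brightness temperatures:

# Hypothetical brightness temperatures for IR3.9, IR10.8 and IR13.4 (K).
bt_039, bt_108, bt_134 = 285.0, 280.0, 265.0

dt_co2 = (bt_108 - bt_134) / 4.0
rcorr = bt_108 ** 4 - (bt_108 - dt_co2) ** 4
t4_co2corr = max(bt_039 ** 4 + rcorr, 0.0) ** 0.25
print(round(t4_co2corr, 1))  # ~288.4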
Example #21
    def __call__(self, projectables, optional_datasets=None, **info):
        """Get the corrected reflectance when removing Rayleigh scattering. Uses
        pyspectral.
        """
        from pyspectral.rayleigh import Rayleigh

        (vis, red) = projectables
        if vis.shape != red.shape:
            raise IncompatibleAreas
        try:
            (sata, satz, suna, sunz) = optional_datasets
        except ValueError:
            from pyorbital.astronomy import get_alt_az, sun_zenith_angle
            from pyorbital.orbital import get_observer_look
            lons, lats = vis.info['area'].get_lonlats()
            sunalt, suna = get_alt_az(vis.info['start_time'], lons, lats)
            suna = np.rad2deg(suna)
            sunz = sun_zenith_angle(vis.info['start_time'], lons, lats)
            sata, satel = get_observer_look(vis.info['satellite_longitude'],
                                            vis.info['satellite_latitude'],
                                            vis.info['satellite_altitude'],
                                            vis.info['start_time'], lons, lats,
                                            0)
            satz = 90 - satel
            del satel
        LOG.info('Removing Rayleigh scattering and aerosol absorption')

        # First make sure the two azimuth angles are in the range 0-360:
        sata = np.mod(sata, 360.)
        suna = np.mod(suna, 360.)
        ssadiff = np.abs(suna - sata)
        ssadiff = np.where(ssadiff > 180, 360 - ssadiff, ssadiff)
        del sata, suna

        atmosphere = self.info.get('atmosphere', 'us-standard')
        aerosol_type = self.info.get('aerosol_type', 'marine_clean_aerosol')

        corrector = Rayleigh(vis.info['platform_name'],
                             vis.info['sensor'],
                             atmosphere=atmosphere,
                             aerosol_type=aerosol_type)

        try:
            refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff,
                                                      vis.id.name, red)
        except KeyError:
            LOG.warning(
                "Could not get the reflectance correction using band name: %s",
                vis.id.name)
            LOG.warning(
                "Will try use the wavelength, however, this may be ambiguous!")
            refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff,
                                                      vis.id.wavelength[1],
                                                      red)

        proj = Dataset(vis - refl_cor_band, copy=False, **vis.info)
        self.apply_modifier_info(vis, proj)

        return proj
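
The azimuth handling above wraps both angles into [0, 360) and folds the difference into [0, 180]. A small numpy sketch with made-up angles:

import numpy as np

sata = np.array([350.0, 10.0, 200.0])   # hypothetical satellite azimuths
suna = np.array([20.0, 355.0, 180.0])   # hypothetical sun azimuths

sata = np.mod(sata, 360.)
suna = np.mod(suna, 360.)
ssadiff = np.abs(suna - sata)
ssadiff = np.where(ssadiff > 180, 360 - ssadiff, ssadiff)
print(ssadiff)  # [30. 15. 20.]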
Example #22
    def __call__(self, projectables, nonprojectables=None, **info):
        if len(projectables) != 2:
            raise ValueError("Expected 2 datasets, got %d" %
                             (len(projectables), ))
        info = combine_info(*projectables)
        info['name'] = self.info['name']

        return Dataset(projectables[0] - projectables[1], **info)
Example #23
    def get_dataset(self, key, info, out=None, xslice=None, yslice=None):
        """Get the dataset designated by *key*."""
        if key.name in [
                'solar_zenith_angle', 'solar_azimuth_angle',
                'satellite_zenith_angle', 'satellite_azimuth_angle'
        ]:

            if key.name == 'solar_zenith_angle':
                var = self.sd.select('SolarZenith')
            if key.name == 'solar_azimuth_angle':
                var = self.sd.select('SolarAzimuth')
            if key.name == 'satellite_zenith_angle':
                var = self.sd.select('SensorZenith')
            if key.name == 'satellite_azimuth_angle':
                var = self.sd.select('SensorAzimuth')

            mask = var[:] == var._FillValue
            data = np.ma.masked_array(var[:] * var.scale_factor, mask=mask)
            return Dataset(data, id=key, **info)

        if key.name not in ['longitude', 'latitude']:
            return

        if (self.cache[key.resolution]['lons'] is None
                or self.cache[key.resolution]['lats'] is None):

            lons_id = DatasetID('longitude', resolution=key.resolution)
            lats_id = DatasetID('latitude', resolution=key.resolution)

            lons, lats = self.load([lons_id, lats_id],
                                   interpolate=False,
                                   raw=True)
            if key.resolution != self.resolution:
                from geotiepoints.geointerpolator import GeoInterpolator
                lons, lats = self._interpolate([lons, lats], self.resolution,
                                               lons_id.resolution,
                                               GeoInterpolator)
                lons = np.ma.masked_invalid(np.ascontiguousarray(lons))
                lats = np.ma.masked_invalid(np.ascontiguousarray(lats))
            self.cache[key.resolution]['lons'] = lons
            self.cache[key.resolution]['lats'] = lats

        if key.name == 'latitude':
            return Dataset(self.cache[key.resolution]['lats'], id=key, **info)
        else:
            return Dataset(self.cache[key.resolution]['lons'], id=key, **info)
Example #24
File: __init__.py  Project: Nojuman/satpy
    def __call__(self, projectables, optional_datasets=None, **info):
        """Get the emissive part an NIR channel after having derived the reflectance.
        Not supposed to be used for wavelength outside [3, 4] µm.
        """
        self._init_refl3x(projectables)
        # Derive the sun-zenith angles, and use the nir and thermal ir
        # brightness temperatures and derive the reflectance using
        # PySpectral. The reflectance is stored internally in PySpectral and
        # needs to be derived first in order to get the emissive part.
        _ = self._get_reflectance(projectables, optional_datasets)
        _nir, _ = projectables
        proj = Dataset(self._refl3x.emissive_part_3x(), **_nir.attrs)

        proj.attrs['units'] = 'K'
        self.apply_modifier_info(_nir, proj)

        return proj
Example #26
    def read_geo(self, key, info):
        """Read angles.
        """
        pairs = {
            ('satellite_azimuth_angle', 'satellite_zenith_angle'):
            ("SatelliteAzimuthAngle", "SatelliteZenithAngle"),
            ('solar_azimuth_angle', 'solar_zenith_angle'):
            ("SolarAzimuthAngle", "SolarZenithAngle"),
            ('dnb_solar_azimuth_angle', 'dnb_solar_zenith_angle'):
            ("SolarAzimuthAngle", "SolarZenithAngle"),
            ('dnb_lunar_azimuth_angle', 'dnb_lunar_zenith_angle'):
            ("LunarAzimuthAngle", "LunarZenithAngle"),
        }

        for pair, fkeys in pairs.items():
            if key.name in pair:
                if (self.cache.get(pair[0]) is None
                        or self.cache.get(pair[1]) is None):
                    angles = self.angles(*fkeys)
                    self.cache[pair[0]], self.cache[pair[1]] = angles
                if key.name == pair[0]:
                    return Dataset(self.cache[pair[0]],
                                   copy=False,
                                   name=key.name,
                                   **self.mda)
                else:
                    return Dataset(self.cache[pair[1]],
                                   copy=False,
                                   name=key.name,
                                   **self.mda)

        if info.get('standard_name') in ['latitude', 'longitude']:
            if self.lons is None or self.lats is None:
                self.lons, self.lats = self.navigate()
            mda = self.mda.copy()
            mda.update(info)
            if info['standard_name'] == 'longitude':
                return Dataset(self.lons, copy=False, id=key, **mda)
            else:
                return Dataset(self.lats, copy=False, id=key, **mda)

        if key.name == 'dnb_moon_illumination_fraction':
            mda = self.mda.copy()
            mda.update(info)
            return Dataset(self.geostuff["MoonIllumFraction"].value, **mda)
Example #27
    def get_dataset(self, key, info):
        """Read data from file and return the corresponding projectables."""
        if key.name in ['longitude', 'latitude']:
            logger.debug('Reading coordinate arrays.')

            if self.lons is None or self.lats is None:
                self.lons, self.lats = self.get_lonlats()

            if key.name == 'latitude':
                proj = Dataset(self.lats, id=key, **info)
            else:
                proj = Dataset(self.lons, id=key, **info)

        else:
            data = self.get_sds_variable(key.name)
            proj = Dataset(data, id=key, **info)

        return proj
Example #28
    def __call__(self, projectables, nonprojectables=None, **info):
        if len(projectables) != 1:
            raise ValueError("Expected 1 dataset, got %d" %
                             (len(projectables), ))

        info = combine_info(*projectables)
        info['name'] = self.info['name']
        info['standard_name'] = self.info['standard_name']

        return Dataset(projectables[0].copy(), **info.copy())
Example #29
    def get_dataset(self, key, info, out=None):
        """Get a dataset from the file."""

        logger.debug("Reading %s.", key.name)
        values = self.file_content[key.name]
        selected = np.array(values)
        out = np.rot90(np.fliplr(np.transpose(selected)))
        info['filename'] = self.finfo['filename']
        ds = Dataset(out, copy=False, **info)
        return ds
Example #30
    def __call__(self,
                 projectables,
                 lim_low=85.,
                 lim_high=95.,
                 *args,
                 **kwargs):
        if len(projectables) != 3:
            raise ValueError("Expected 3 datasets, got %d" %
                             (len(projectables), ))
        try:
            day_data = projectables[0].copy()
            night_data = projectables[1].copy()
            coszen = np.cos(np.deg2rad(projectables[2]))

            coszen -= min(np.cos(np.deg2rad(lim_high)),
                          np.cos(np.deg2rad(lim_low)))
            coszen /= np.abs(
                np.cos(np.deg2rad(lim_low)) - np.cos(np.deg2rad(lim_high)))
            coszen = np.clip(coszen, 0, 1)

            full_data = []

            # Apply enhancements
            day_data = enhance2dataset(day_data)
            night_data = enhance2dataset(night_data)

            # Match dimensions to the data with more channels
            # There are only 1-channel and 3-channel composites
            if day_data.shape[0] > night_data.shape[0]:
                night_data = np.ma.repeat(night_data, 3, 0)
            elif day_data.shape[0] < night_data.shape[0]:
                day_data = np.ma.repeat(day_data, 3, 0)

            for i in range(day_data.shape[0]):
                day = day_data[i, :, :]
                night = night_data[i, :, :]

                data = (1 - coszen) * np.ma.masked_invalid(night).filled(0) + \
                    coszen * np.ma.masked_invalid(day).filled(0)
                data = np.ma.array(data,
                                   mask=np.logical_and(night.mask, day.mask),
                                   copy=False)
                data = Dataset(np.ma.masked_invalid(data),
                               copy=True,
                               **projectables[0].info)
                full_data.append(data)

            res = RGBCompositor.__call__(
                self, (full_data[0], full_data[1], full_data[2]), *args,
                **kwargs)

        except ValueError:
            raise IncompatibleAreas

        return res
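
The day/night weighting above maps the cosine of the solar zenith angle onto [0, 1] across the twilight band between lim_low and lim_high. A stand-alone sketch with made-up zenith angles:

import numpy as np

lim_low, lim_high = 85., 95.
sza = np.array([70., 85., 90., 95., 110.])   # hypothetical solar zenith angles

coszen = np.cos(np.deg2rad(sza))
coszen -= min(np.cos(np.deg2rad(lim_high)), np.cos(np.deg2rad(lim_low)))
coszen /= np.abs(np.cos(np.deg2rad(lim_low)) - np.cos(np.deg2rad(lim_high)))
coszen = np.clip(coszen, 0, 1)
print(coszen.round(2))  # day -> 1, night -> 0, 0.5 at the terminator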
Example #31
    def get_dataset(self, key, info):
        """Load a dataset."""
        if self._polarization != key.polarization:
            return

        logger.debug('Reading %s.', key.name)

        band = self.filehandle

        if key.name in ['longitude', 'latitude']:
            logger.debug('Constructing coordinate arrays.')

            if self.lons is None or self.lats is None:
                self.lons, self.lats = self.get_lonlats()

            if key.name == 'latitude':
                proj = Dataset(self.lats, id=key, **info)
            else:
                proj = Dataset(self.lons, id=key, **info)

        else:
            data = band.GetRasterBand(1).ReadAsArray().astype(np.float64)
            logger.debug('Reading noise data.')

            noise = self.noise.get_noise_correction(data.shape)

            logger.debug('Reading calibration data.')

            cal = self.calibration.get_calibration('gamma', data.shape)
            cal_constant = self.calibration.get_calibration_constant()

            logger.debug('Calibrating.')

            data **= 2
            data += cal_constant - noise
            data /= cal
            data[data < 0] = 0
            del noise, cal

            proj = Dataset(np.sqrt(data), copy=False, units='sigma')
        del band
        return proj