Example No. 1
    def calibrate(self,
                  dataset_id,
                  pre_launch_coeffs=False,
                  calib_coeffs=None):
        """Calibrate the data."""
        if calib_coeffs is None:
            calib_coeffs = {}

        units = {
            'reflectance': '%',
            'brightness_temperature': 'K',
            'counts': '',
            'radiance': 'W*m-2*sr-1*cm'
        }

        if dataset_id['name'] in ("3a", "3b") and self._is3b is None:
            # Is it 3a or 3b:
            line_chunks = get_aapp_chunks((self._data.shape[0], 2048))[0]
            self._is3a = da.bitwise_and(
                da.from_array(self._data['scnlinbit'], chunks=line_chunks),
                3) == 0
            self._is3b = da.bitwise_and(
                da.from_array(self._data['scnlinbit'], chunks=line_chunks),
                3) == 1

        # Map the channel name to a visible (1, 2, 3a) or an
        # infrared (3b, 4, 5) band index.
        try:
            vis_idx = ['1', '2', '3a'].index(dataset_id['name'])
            ir_idx = None
        except ValueError:
            vis_idx = None
            ir_idx = ['3b', '4', '5'].index(dataset_id['name'])

        mask = True
        if vis_idx is not None:
            coeffs = calib_coeffs.get('ch' + dataset_id['name'])
            if dataset_id['name'] == '3a':
                mask = self._is3a[:, None]
            ds = create_xarray(
                _vis_calibrate(self._data,
                               vis_idx,
                               dataset_id['calibration'],
                               pre_launch_coeffs,
                               coeffs,
                               mask=mask))
        else:
            if dataset_id['name'] == '3b':
                mask = self._is3b[:, None]
            ds = create_xarray(
                _ir_calibrate(self._header,
                              self._data,
                              ir_idx,
                              dataset_id['calibration'],
                              mask=mask))

        ds.attrs['units'] = units[dataset_id['calibration']]
        ds.attrs.update(dataset_id._asdict())
        return ds
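
The channel-3 switch above comes down to one bitwise test per scan line: the two lowest bits of each line's 'scnlinbit' word say whether channel 3a or 3b was active. A minimal standalone sketch, with toy flag values invented for illustration:

    import dask.array as da
    import numpy as np

    scnlinbit = np.array([0, 1, 0, 1, 1], dtype=np.int16)  # toy per-line flags
    lines = da.from_array(scnlinbit, chunks=2)
    is3a = da.bitwise_and(lines, 3) == 0  # low two bits 0 -> channel 3a
    is3b = da.bitwise_and(lines, 3) == 1  # low two bits 1 -> channel 3b
    print(is3a.compute())  # [ True False  True False False]
    print(is3b.compute())  # [False  True False  True  True]

Broadcasting the 1-D result as mask[:, None], as calibrate() does, then masks whole scan lines of the 2-D channel array.
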
Example No. 2
    def check(self):
        """Check VIS channel quality and issue a warning if it's bad."""
        use_with_caution = da.bitwise_and(self._mask, 2)
        if use_with_caution.all():
            warnings.warn(
                'All pixels of the VIS channel are flagged as "use with '
                'caution". Use datasets "quality_pixel_bitmask" and '
                '"data_quality_bitmask" to find out why.')
Example No. 3
    def _set_keep(self,
                  time_keep=None,
                  freq_keep=None,
                  corrprod_keep=None,
                  weights_keep=None,
                  flags_keep=None):
        """Set time, frequency and/or correlation product selection masks.

        Set the selection masks for those parameters that are present. Also
        include weights and flags selections as options.

        Parameters
        ----------
        time_keep : array of bool, shape (*T*,), optional
            Boolean selection mask with one entry per timestamp
        freq_keep : array of bool, shape (*F*,), optional
            Boolean selection mask with one entry per frequency channel
        corrprod_keep : array of bool, shape (*B*,), optional
            Boolean selection mask with one entry per correlation product
        weights_keep : 'all' or string or sequence of strings, optional
            Names of selected weight types (or 'all' for the lot)
        flags_keep : 'all' or string or sequence of strings, optional
            Names of selected flag types (or 'all' for the lot)

        """
        DataSet._set_keep(self, time_keep, freq_keep, corrprod_keep,
                          weights_keep, flags_keep)
        update_all = time_keep is not None or freq_keep is not None or corrprod_keep is not None
        update_flags = update_all or flags_keep is not None
        if not self.source.data:
            self._vis = self._weights = self._flags = None
        elif update_flags:
            # Create first-stage index from dataset selectors. Note: use
            # the member variables, not the parameters, because the parameters
            # can be None to indicate no change
            stage1 = (self._time_keep, self._freq_keep, self._corrprod_keep)
            if update_all:
                # Cache dask graphs for the data fields
                self._vis = DaskLazyIndexer(self.source.data.vis, stage1)
                self._weights = DaskLazyIndexer(self.source.data.weights,
                                                stage1)
            flag_transforms = []
            # Mask out unselected flag bits only if some are deselected
            if ~self._flags_select != 0:
                # Copy so that the lambda isn't affected by future changes
                select = self._flags_select.copy()
                flag_transforms.append(
                    lambda flags: da.bitwise_and(select, flags))
            flag_transforms.append(lambda flags: flags.view(np.bool_))
            self._flags = DaskLazyIndexer(self.source.data.flags, stage1,
                                          flag_transforms)
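
Stripped of DaskLazyIndexer, the flag pipeline assembled above is just two chained transforms. A minimal sketch with a toy flags array and a single selected flag bit:

    import dask.array as da
    import numpy as np

    flags = da.from_array(np.array([0, 1, 4, 5], dtype=np.uint8), chunks=2)
    select = np.uint8(1)                    # keep only flag bit 0

    masked = da.bitwise_and(select, flags)  # -> [0, 1, 0, 1]
    as_bool = masked.view(np.bool_)         # nonzero bytes read back as True
    print(as_bool.compute())                # [False  True False  True]

Capturing select as a copy, as the code above does, keeps the lambda stable even if self._flags_select is changed later.
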
Example No. 4
    def _set_keep(self, time_keep=None, freq_keep=None, corrprod_keep=None,
                  weights_keep=None, flags_keep=None):
        """Set time, frequency and/or correlation product selection masks.

        Set the selection masks for those parameters that are present. Also
        include weights and flags selections as options.

        Parameters
        ----------
        time_keep : array of bool, shape (*T*,), optional
            Boolean selection mask with one entry per timestamp
        freq_keep : array of bool, shape (*F*,), optional
            Boolean selection mask with one entry per frequency channel
        corrprod_keep : array of bool, shape (*B*,), optional
            Boolean selection mask with one entry per correlation product
        weights_keep : 'all' or string or sequence of strings, optional
            Names of selected weight types (or 'all' for the lot)
        flags_keep : 'all' or string or sequence of strings, optional
            Names of selected flag types (or 'all' for the lot)

        """
        DataSet._set_keep(self, time_keep, freq_keep, corrprod_keep, weights_keep, flags_keep)
        update_all = time_keep is not None or freq_keep is not None or corrprod_keep is not None
        update_flags = update_all or flags_keep is not None
        if not self.source.data:
            self._vis = self._weights = self._flags = self._excision = None
        elif update_flags:
            # Create first-stage index from dataset selectors. Note: use
            # the member variables, not the parameters, because the parameters
            # can be None to indicate no change
            stage1 = (self._time_keep, self._freq_keep, self._corrprod_keep)
            if update_all:
                # Cache dask graphs for the data fields
                self._vis = DaskLazyIndexer(self._corrected.vis, stage1)
                self._weights = DaskLazyIndexer(self._corrected.weights, stage1)
            flag_transforms = []
            # Mask out unselected flag bits only if some are deselected
            if ~self._flags_select != 0:
                # Copy so that the lambda isn't affected by future changes
                select = self._flags_select.copy()
                flag_transforms.append(lambda flags: da.bitwise_and(select, flags))
            flag_transforms.append(lambda flags: flags.view(np.bool_))
            self._flags = DaskLazyIndexer(self._corrected.flags, stage1, flag_transforms)
            unscaled_weights = self._corrected.unscaled_weights
            if unscaled_weights is None or self.accumulations_per_dump is None:
                self._excision = None
            else:
                # The maximum / expected number of CBF dumps per SDP dump
                cbf_dumps_per_sdp_dump = round(self.dump_period / self.cbf_dump_period)
                accs_per_sdp_dump = np.float32(self.accumulations_per_dump)
                accs_per_cbf_dump = accs_per_sdp_dump / np.float32(cbf_dumps_per_sdp_dump)
                # Each unscaled weight represents the actual number of accumulations per SDP dump.
                # Correct most of the weight compression artefacts by forcing each weight to be
                # an integer multiple of CBF n_accs, and then convert it to an excision fraction.
                def integer_cbf_dumps(w):
                    return da.round(w / accs_per_cbf_dump) * accs_per_cbf_dump

                def excision_fraction(w):
                    return (accs_per_sdp_dump - w) / accs_per_sdp_dump

                excision_transforms = [integer_cbf_dumps, excision_fraction]
                self._excision = DaskLazyIndexer(unscaled_weights, stage1, excision_transforms)
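
The two excision transforms reduce to a couple of lines of arithmetic. A minimal sketch with invented numbers (1024 accumulations per SDP dump, 4 CBF dumps per SDP dump):

    import dask.array as da
    import numpy as np

    accs_per_sdp_dump = np.float32(1024)
    accs_per_cbf_dump = accs_per_sdp_dump / np.float32(4)

    w = da.from_array(np.array([1020., 768., 1024.], dtype=np.float32), chunks=2)
    w = da.round(w / accs_per_cbf_dump) * accs_per_cbf_dump  # -> [1024, 768, 1024]
    excision = (accs_per_sdp_dump - w) / accs_per_sdp_dump   # -> [0.0, 0.25, 0.0]
    print(excision.compute())

Rounding snaps each weight to a whole number of CBF dumps, undoing most weight-compression artefacts before the shortfall is expressed as an excised fraction of the dump.
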
Example No. 5
    def calibrate(self,
                  dataset_id,
                  pre_launch_coeffs=False,
                  calib_coeffs=None):
        """Calibrate the data."""
        if calib_coeffs is None:
            calib_coeffs = {}

        units = {
            'reflectance': '%',
            'brightness_temperature': 'K',
            'counts': '',
            'radiance': 'W*m-2*sr-1*cm'
        }

        if dataset_id.name in ("3a", "3b") and self._is3b is None:
            # Is it 3a or 3b:
            self._is3a = da.bitwise_and(
                da.from_array(self._data['scnlinbit'], chunks=LINE_CHUNK),
                3) == 0
            self._is3b = da.bitwise_and(
                da.from_array(self._data['scnlinbit'], chunks=LINE_CHUNK),
                3) == 1

        if dataset_id.name == '3a' and not np.any(self._is3a):
            raise ValueError("Empty dataset for channel 3A")
        if dataset_id.name == '3b' and not np.any(self._is3b):
            raise ValueError("Empty dataset for channel 3B")

        # Map the channel name to a visible (1, 2, 3a) or an
        # infrared (3b, 4, 5) band index.
        try:
            vis_idx = ['1', '2', '3a'].index(dataset_id.name)
            ir_idx = None
        except ValueError:
            vis_idx = None
            ir_idx = ['3b', '4', '5'].index(dataset_id.name)

        mask = True
        if vis_idx is not None:
            coeffs = calib_coeffs.get('ch' + dataset_id.name)
            if dataset_id.name == '3a':
                mask = self._is3a[:, None]
            ds = create_xarray(
                _vis_calibrate(self._data,
                               vis_idx,
                               dataset_id.calibration,
                               pre_launch_coeffs,
                               coeffs,
                               mask=mask))
        else:
            if dataset_id.name == '3b':
                mask = self._is3b[:, None]
            ds = create_xarray(
                _ir_calibrate(self._header,
                              self._data,
                              ir_idx,
                              dataset_id.calibration,
                              mask=mask))

        ds.attrs['units'] = units[dataset_id.calibration]
        ds.attrs.update(dataset_id._asdict())
        return ds
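
The main addition over Example No. 1 is the empty-channel guard. A minimal sketch with toy flag values (every line marked 3b, so the 3a check trips):

    import dask.array as da
    import numpy as np

    scnlinbit = np.array([1, 1, 1], dtype=np.int16)
    is3a = da.bitwise_and(da.from_array(scnlinbit, chunks=3), 3) == 0
    try:
        if not np.any(is3a):  # np.any() on a dask array; truth-testing computes it
            raise ValueError("Empty dataset for channel 3A")
    except ValueError as err:
        print(err)  # Empty dataset for channel 3A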