Example #1
0
    def _mono_iterations(self, bands):
        """Run the mono (single-scene) classifier over all time frames in batches.

        :param bands: Array of shape (time, ..., band) with input band values
        :return: Column vector with one positive-class probability per pixel,
            ordered frame by frame
        """
        pixels_per_frame = np.prod(bands.shape[1:-1])
        proba = np.empty((np.prod(bands.shape[:-1]), 1))

        n_times = bands.shape[0]

        # Process at most `max_proc_frames` time frames per classifier call
        for start in range(0, n_times, self.max_proc_frames):
            stop = min(start + self.max_proc_frames, n_times)

            # Flatten the batch into (n_samples, n_bands) feature rows
            batch = bands[start:stop]
            features = batch.reshape(np.prod(batch.shape[:-1]),
                                     batch.shape[-1])

            # Predict under the multiprocessing lock and keep only the
            # positive-class probability column
            predictions = execute_with_mp_lock(
                self.mono_classifier.predict_proba, features)
            proba[start * pixels_per_frame:
                  stop * pixels_per_frame] = predictions[..., 1:]

        return proba
Example #2
0
    def execute(self, eopatch):
        """ Add cloud binary mask and (optionally) cloud probability map to input eopatch

        :param eopatch: Input `EOPatch` instance
        :return: `EOPatch` with additional cloud maps
        :raises ValueError: If the eopatch contains no data features
        """
        # Downsample or make request
        if not eopatch.data:
            raise ValueError('EOPatch must contain some data feature')
        if self.data_feature in eopatch.data:
            # Feature is already present: only downscale it for classification
            new_data, rescale = self._downscaling(
                eopatch.data[self.data_feature], eopatch.meta_info)
            reference_shape = eopatch.data[self.data_feature].shape[:3]
        else:
            # Feature missing: fetch it from the service and align timestamps
            new_data, new_dates = self._make_request(eopatch.bbox,
                                                     eopatch.meta_info,
                                                     eopatch.timestamp)
            removed_frames = eopatch.consolidate_timestamps(new_dates)
            for rm_frame in removed_frames:
                LOGGER.warning(
                    'Removed data for frame %s from '
                    'eopatch due to unavailability of %s!', str(rm_frame),
                    self.data_feature)

            # Get reference shape from first item in data dictionary
            if not eopatch.data:
                raise ValueError(
                    'Given EOPatch does not have any data feature')

            reference_data_feature = sorted(eopatch.data)[0]
            reference_shape = eopatch.data[reference_data_feature].shape[:3]
            rescale = self._get_rescale_factors(reference_shape[1:3],
                                                eopatch.meta_info)

        clf_probs_lr = execute_with_mp_lock(
            self.classifier.get_cloud_probability_maps, new_data)
        clf_mask_lr = self.classifier.get_mask_from_prob(clf_probs_lr)

        # Add cloud mask as a feature to EOPatch
        clf_mask_hr = self._upsampling(clf_mask_lr,
                                       rescale,
                                       reference_shape,
                                       interp='nearest')
        # Use builtin `bool` here: `np.bool` was a deprecated alias removed
        # in NumPy 1.24, so the original `astype(np.bool)` breaks on modern
        # NumPy; behavior is identical.
        eopatch.mask[self.cm_feature] = clf_mask_hr.astype(bool)

        # If the feature name for cloud probability maps is specified, add as feature
        if self.cprobs_feature is not None:
            clf_probs_hr = self._upsampling(clf_probs_lr,
                                            rescale,
                                            reference_shape,
                                            interp='linear')
            eopatch.data[self.cprobs_feature] = clf_probs_hr.astype(np.float32)

        return eopatch
Example #3
0
    def _run_prediction(classifier, features):
        """Uses classifier object on given data"""
        if isinstance(classifier, Booster):
            # A LightGBM Booster's predict already yields the desired output
            return execute_with_mp_lock(classifier.predict, features)

        # Otherwise assume a scikit-learn estimator and keep only the
        # positive-class probability column
        proba = execute_with_mp_lock(classifier.predict_proba, features)
        return proba[..., 1]
Example #4
0
    def _multi_iterations(self, bands, is_data):
        """Run the multi-temporal classifier once per time frame.

        :param bands: Array of shape (time, ..., band) with input band values
        :param is_data: Boolean validity mask matching `bands` minus the band axis
        :return: Column vector with one positive-class probability per pixel,
            ordered frame by frame
        """
        pixels_per_frame = np.prod(bands.shape[1:-1])
        proba = np.empty((np.prod(bands.shape[:-1]), 1))

        n_times = bands.shape[0]

        # Cached window statistics and the window they were computed for
        win_mu, win_var = None, None
        cached_window = (None, None)

        for frame in range(n_times):

            # Temporal window around the current frame
            nt_min, nt_max, nt_rel = self._frame_indices(n_times, frame)

            window_bands = bands[nt_min:nt_max]
            window_valid = is_data[nt_min:nt_max]

            # Mask out invalid pixels across all band channels
            masked_window = np.ma.array(
                window_bands,
                mask=~window_valid.repeat(window_bands.shape[-1], axis=-1))

            # Recompute window averages/variances only when the window moved
            if win_mu is None or cached_window != (nt_min, nt_max):
                win_mu, win_var = self._update_batches(
                    win_mu, win_var, window_bands, window_valid)

            # Interweave and concatenate into per-pixel feature rows
            features = self._extract_multi_features(
                window_bands, window_valid, win_mu, win_var, nt_rel,
                masked_window)

            # Predict under the multiprocessing lock; keep the
            # positive-class probability column for this frame's pixels
            proba[frame * pixels_per_frame:
                  (frame + 1) * pixels_per_frame] = execute_with_mp_lock(
                      self.multi_classifier.predict_proba, features)[..., 1:]

            cached_window = (nt_min, nt_max)

        return proba