Example 1
0
 def manual_data_stack(self, datasets, measurements, mask, skip_corrections,
                       **kwargs):
     # pylint: disable=too-many-locals, too-many-branches
     # Read each dataset individually, mask it by its extent, optionally
     # solar-correct it, and merge the results with combine_first.
     if mask:
         bands = [self._product.pq_band]
     else:
         bands = self.needed_bands()
     merged = None
     for ds in datasets:
         ds_data = read_data(ds, measurements, self._geobox, **kwargs)
         # AND together every extent-mask function over every band.
         combined_mask = None
         for band in bands:
             for mask_func in self._product.extent_mask_func:
                 band_mask = mask_func(ds_data, band)
                 if combined_mask is None:
                     combined_mask = band_mask
                 else:
                     combined_mask &= band_mask
         masked = ds_data.where(combined_mask)
         if self._product.solar_correction and not (mask or skip_corrections):
             # Solar-correct every band except the PQ band.
             for band in bands:
                 if band != self._product.pq_band:
                     masked[band] = solar_correct_data(masked[band], ds)
         merged = masked if merged is None else merged.combine_first(masked)
     if mask:
         merged = merged.astype('uint8', copy=True)
         # astype discards attrs; restore them from the last dataset read.
         for band in bands:
             merged[band].attrs = ds_data[band].attrs
     return merged
Example 2
0
    def data(self, datasets, mask=False, manual_merge=False, skip_corrections=False, **kwargs):
        # pylint: disable=too-many-locals, consider-using-enumerate
        # Load the requested bands (or the PQ band when mask=True) for the
        # given datasets, dispatching to the appropriate merge strategy.
        if mask:
            prod = self._product.pq_product
            measurements = [prod.measurements[self._product.pq_band].copy()]
        else:
            prod = self._product.product
            measurements = [prod.measurements[name].copy() for name in self.needed_bands()]

        with datacube.set_options(reproject_threads=1, fast_load=True):
            if manual_merge:
                return self.manual_data_stack(datasets, measurements, mask, skip_corrections, **kwargs)
            if self._product.solar_correction and not mask and not skip_corrections:
                # Merge performed already by dataset extent, but we need to
                # process the data for the datasets individually to do solar correction.
                merged = None
                for ds in datasets:
                    d = read_data(ds, measurements, self._geobox, **kwargs)
                    for band in self.needed_bands():
                        if band != self._product.pq_band:
                            d[band] = solar_correct_data(d[band], ds)
                    merged = d if merged is None else merged.combine_first(d)
                return merged
            # Merge performed already by dataset extent.
            return read_data(datasets, measurements, self._geobox, self._resampling, **kwargs)
Example 3
0
 def manual_data_stack(self, datasets, measurements, mask, skip_corrections,
                       use_overviews, **kwargs):
     #pylint: disable=too-many-locals, too-many-branches
     """Read each dataset individually, apply extent masking and optional
     solar correction, and merge the results with combine_first.

     :param datasets: iterable of datasets to read and merge
     :param measurements: measurement definitions to load
     :param mask: if True, work only on the PQ band and return a uint8 stack
     :param skip_corrections: if True, skip solar correction
     :param use_overviews: passed through to read_data
     :return: merged dataset (None when datasets is empty)
     """
     merged = None
     if mask:
         bands = [self._product.pq_band]
     else:
         bands = self.needed_bands()
     # Iterate datasets directly: the index and the per-iteration numpy
     # "holder" array in the original were never used.
     for ds in datasets:
         d = read_data(ds, measurements, self._geobox, use_overviews,
                       **kwargs)
         # AND together every extent-mask function over every band.
         extent_mask = None
         for band in bands:
             for f in self._product.extent_mask_func:
                 if extent_mask is None:
                     extent_mask = f(d, band)
                 else:
                     extent_mask &= f(d, band)
         dm = d.where(extent_mask)
         if self._product.solar_correction and not mask and not skip_corrections:
             # Solar-correct every band except the PQ band.
             for band in bands:
                 if band != self._product.pq_band:
                     dm[band] = solar_correct_data(dm[band], ds)
         if merged is None:
             merged = dm
         else:
             merged = merged.combine_first(dm)
     if mask:
         merged = merged.astype('uint8', copy=True)
         # astype discards attrs; restore them from the last dataset read.
         for band in bands:
             merged[band].attrs = d[band].attrs
     return merged
Example 4
0
 def manual_data_stack(self, datasets, measurements, mask, skip_corrections):
     """Load each dataset individually via Datacube.load_data, apply extent
     masking and optional solar correction, and merge with combine_first.

     :param datasets: sequence of datasets to read and merge
     :param measurements: measurement definitions to load
     :param mask: if True, work only on the PQ band and return a uint8 stack
     :param skip_corrections: if True, skip solar correction
     :return: merged dataset (None when datasets is empty)
     """
     merged = None
     if mask:
         bands = [self._product.pq_band]
     else:
         bands = self.needed_bands()
     # Iterate datasets directly instead of indexing with range(len(...)).
     for ds in datasets:
         # load_data expects the datasets wrapped in a 0-d object array.
         holder = numpy.empty(shape=tuple(), dtype=object)
         holder[()] = [ds]
         sources = xarray.DataArray(holder)
         d = datacube.Datacube.load_data(sources, self._geobox, measurements)
         # AND together every extent-mask function over every band.
         extent_mask = None
         for band in bands:
             for f in self._product.extent_mask_func:
                 if extent_mask is None:
                     extent_mask = f(d, band)
                 else:
                     extent_mask &= f(d, band)
         dm = d.where(extent_mask)
         if self._product.solar_correction and not mask and not skip_corrections:
             # Solar-correct every band except the PQ band.
             for band in bands:
                 if band != self._product.pq_band:
                     dm[band] = solar_correct_data(dm[band], ds)
         if merged is None:
             merged = dm
         else:
             merged = merged.combine_first(dm)
     if mask:
         merged = merged.astype('uint8', copy=True)
         # astype discards attrs; restore them from the last dataset read.
         for band in bands:
             merged[band].attrs = d[band].attrs
     return merged
Example 5
0
    def data(self,
             datasets,
             mask=False,
             manual_merge=False,
             skip_corrections=False,
             use_overviews=False,
             **kwargs):
        #pylint: disable=too-many-locals
        """Load the requested bands (or the PQ band when mask=True) for the
        given datasets, dispatching to the appropriate merge strategy.

        :param datasets: datasets to read (already merged by extent unless
            manual_merge is set)
        :param mask: load only the PQ band as a mask
        :param manual_merge: merge the datasets one at a time manually
        :param skip_corrections: skip solar correction
        :param use_overviews: passed through to read_data
        :return: the loaded (and possibly merged/corrected) data
        """
        if mask:
            prod = self._product.pq_product
            measurements = [prod.measurements[self._product.pq_band].copy()]
        else:
            prod = self._product.product
            measurements = [
                prod.measurements[name].copy() for name in self.needed_bands()
            ]

        with datacube.set_options(reproject_threads=1, fast_load=True):
            if manual_merge:
                return self.manual_data_stack(datasets, measurements, mask,
                                              skip_corrections, use_overviews,
                                              **kwargs)
            elif self._product.solar_correction and not mask and not skip_corrections:
                # Merge performed already by dataset extent, but we need to
                # process the data for the datasets individually to do solar correction.
                # Iterate directly; the original's per-iteration numpy
                # "holder" array was never used.
                merged = None
                for ds in datasets:
                    d = read_data(ds, measurements, self._geobox,
                                  use_overviews, **kwargs)
                    for band in self.needed_bands():
                        if band != self._product.pq_band:
                            d[band] = solar_correct_data(d[band], ds)
                    if merged is None:
                        merged = d
                    else:
                        merged = merged.combine_first(d)
                return merged
            else:
                # Merge performed already by dataset extent.
                # NOTE(review): the original built an xarray "sources" wrapper
                # here but never passed it anywhere — read_data received the
                # raw datasets. The dead construction has been removed.
                return read_data(datasets, measurements, self._geobox,
                                 use_overviews, **kwargs)