Example #1
    def doit(self):

        shape = (4000, 4000)

        masks = [PQ_MASK_CLEAR,
                 PQ_MASK_SATURATION_OPTICAL,
                 PQ_MASK_SATURATION_THERMAL,
                 PQ_MASK_CONTIGUITY,
                 PQ_MASK_LAND,
                 PQ_MASK_CLOUD_ACCA,
                 PQ_MASK_CLOUD_FMASK,
                 PQ_MASK_CLOUD_SHADOW_ACCA,
                 PQ_MASK_CLOUD_SHADOW_FMASK]

        observation_count = empty_array(shape=shape, dtype=numpy.int16, ndv=0)

        observation_count_clear = dict()

        for mask in masks:
            observation_count_clear[mask] = empty_array(shape=shape, dtype=numpy.int16, ndv=0)

        metadata = None

        for tile in self.get_tiles():

            # Get the PQ mask

            pq = tile.datasets[DatasetType.PQ25]
            data = get_dataset_data(pq, [Pq25Bands.PQ])[Pq25Bands.PQ]

            #
            # Count the pixels that are not NDV - there shouldn't actually be any, but just in case
            #

            # Mask out any no-data pixels
            data = numpy.ma.masked_equal(data, NDV)

            # Count the data pixels - i.e. pixels that were NOT masked out
            observation_count += numpy.where(data.mask, 0, 1)

            #
            # Count any pixels that are not masked out due to pixel quality
            #

            for mask in masks:
                # Apply the particular pixel mask
                pqm = numpy.ma.masked_where(numpy.bitwise_and(data, mask) != mask, data)

                # Count the pixels that were not masked out
                observation_count_clear[mask] += numpy.where(pqm.mask, 0, 1)

            if not metadata:
                metadata = get_dataset_metadata(pq)

        # Create the output datasets

        # Observation Count

        raster_create(self.output()[0].path, [observation_count] + [observation_count_clear[mask] for mask in masks],
                      metadata.transform, metadata.projection, NDV, GDT_Int16)
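
The per-mask counting in Example #1 hinges on one bitwise test: a pixel passes a given PQ check only when every bit of that check's mask is set, i.e. numpy.bitwise_and(data, mask) == mask. Below is a minimal, self-contained sketch of that test on synthetic data; the two mask constants are hypothetical stand-ins, not the real PQ_MASK_* values. Example #1 accumulates exactly this per-tile count into observation_count_clear[mask] across all tiles.

    import numpy

    # Hypothetical stand-ins for two single-bit PQ masks (not the real PQ_MASK_* values).
    MASK_A = 0b01
    MASK_B = 0b10

    # Synthetic PQ word per pixel: the four possible two-bit patterns.
    pq_data = numpy.array([[0b00, 0b01],
                           [0b10, 0b11]], dtype=numpy.int16)

    counts = {}
    for mask in (MASK_A, MASK_B):
        # A pixel is "clear" for this test only if all bits of the mask are set.
        clear = numpy.ma.masked_where(numpy.bitwise_and(pq_data, mask) != mask, pq_data)
        # Count the pixels that were NOT masked out.
        counts[mask] = int(numpy.where(numpy.ma.getmaskarray(clear), 0, 1).sum())

    print(counts)  # {1: 2, 2: 2} - two of the four pixels pass each single-bit test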
Example #2
    def run(self):

        shape = (4000, 4000)

        extra = [ExtraBands.OBSERVATIONS, ExtraBands.LOW, ExtraBands.MEDIAN, ExtraBands.HIGH]

        band_desc = ["TOTAL_OBSERVATIONS","LOWEST TIDE","MEDIAN TIDE", "HIGHEST TIDE"]

        extra_fields = dict()
        for field in extra:
            extra_fields[field] = empty_array(shape=shape, dtype=numpy.int16, fill=0)

        metadata = None
        low_value = 0
        high_value = 0
        median_value = 0
        observations = 0
        cur = self.load_tidal_file()
        low = cur[0]
        high = cur[1]
        observations = cur[2]
        #import pdb; pdb.set_trace()
        low_value = int(float(low) * 1000)
        high_value = int(float(high) * 1000)
        median_value = (low_value + high_value)/2
        #import pdb; pdb.set_trace()
        _log.info("\toffset file low %d high %d median %d lines %d ", low_value, high_value, median_value, observations)
        for layer in extra_fields:
            lv = layer.value
            if lv == 1:
                extra_fields[layer][:] = observations
            if lv == 2:
                extra_fields[layer][:] = low_value
            if lv == 3:
                extra_fields[layer][:] = median_value
            if lv == 4:
                extra_fields[layer][:] = high_value
        transform = (self.x, 0.00025, 0.0, self.y+1, 0.0, -0.00025)

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(4326)

        projection = srs.ExportToWkt()

        # Create the output dataset
        metadata_info = self.generate_raster_metadata()
        raster_create(self.output().path, [extra_fields[field] for field in extra], transform, projection, NDV, GDT_Int16, dataset_metadata=metadata_info, band_ids=band_desc)
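
The transform tuple built above follows the GDAL geotransform convention (origin x, pixel width, row rotation, origin y, column rotation, pixel height), so with 0.00025-degree pixels a 4000 x 4000 array spans exactly one degree. Here is a small sketch of how a (column, row) index maps to geographic coordinates under that convention; the origin values are made up for illustration, not taken from the task above.

    def pixel_to_geo(transform, col, row):
        """Map a (col, row) pixel index to (x, y) using a GDAL-style geotransform."""
        x0, px_w, row_rot, y0, col_rot, px_h = transform
        x = x0 + col * px_w + row * row_rot
        y = y0 + col * col_rot + row * px_h
        return x, y

    # A one-degree cell with an assumed lower-left corner at (148.0, -35.0),
    # mirroring transform = (self.x, 0.00025, 0.0, self.y + 1, 0.0, -0.00025).
    transform = (148.0, 0.00025, 0.0, -35.0 + 1, 0.0, -0.00025)
    print(pixel_to_geo(transform, 0, 0))        # (148.0, -34.0) - top-left corner
    print(pixel_to_geo(transform, 4000, 4000))  # (149.0, -35.0) - bottom-right corner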
Example #3
    def run(self):
        self.parse_arguments()

        config = Config()
        _log.debug(config.to_str())

        path = self.get_output_filename(self.dataset_type)
        _log.info("Output file is [%s]", path)

        if os.path.exists(path):
            if self.overwrite:
                _log.info("Removing existing output file [%s]", path)
                os.remove(path)
            else:
                _log.error("Output file [%s] exists", path)
                raise Exception("Output file [%s] already exists" % path)

        # TODO
        bands = get_bands(self.dataset_type, self.satellites[0])

        # TODO once WOFS is in the cube

        tiles = list_tiles_as_list(x=[self.x], y=[self.y], acq_min=self.acq_min, acq_max=self.acq_max,
                                   satellites=[satellite for satellite in self.satellites],
                                   dataset_types=[self.dataset_type],
                                   database=config.get_db_database(),
                                   user=config.get_db_username(),
                                   password=config.get_db_password(),
                                   host=config.get_db_host(), port=config.get_db_port())

        raster = None
        metadata = None

        # TODO - PQ is UINT16 (others are INT16) and so the -999 NDV doesn't work
        ndv = UINT16_MAX if self.dataset_type == DatasetType.PQ25 else NDV

        _log.debug("Current MAX RSS  usage is [%d] MB",  resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)

        import itertools
        for x, y in itertools.product(range(0, 4000, self.chunk_size_x), range(0, 4000, self.chunk_size_y)):

            _log.info("About to read data chunk ({xmin:4d},{ymin:4d}) to ({xmax:4d},{ymax:4d})".format(xmin=x, ymin=y, xmax=x+self.chunk_size_x-1, ymax=y+self.chunk_size_y-1))
            _log.debug("Current MAX RSS  usage is [%d] MB",  resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)

            stack = dict()

            for tile in tiles:

                if self.list_only:
                    _log.info("Would summarise dataset [%s]", tile.datasets[self.dataset_type].path)
                    continue

                pqa = None

                _log.debug("Reading dataset [%s]", tile.datasets[self.dataset_type].path)

                if not metadata:
                    metadata = get_dataset_metadata(tile.datasets[self.dataset_type])

                # Apply PQA if specified

                if self.apply_pqa_filter:
                    data = get_dataset_data_with_pq(tile.datasets[self.dataset_type], tile.datasets[DatasetType.PQ25], bands=bands, x=x, y=y, x_size=self.chunk_size_x, y_size=self.chunk_size_y, pq_masks=self.pqa_mask, ndv=ndv)

                else:
                    data = get_dataset_data(tile.datasets[self.dataset_type], bands=bands, x=x, y=y, x_size=self.chunk_size_x, y_size=self.chunk_size_y)

                for band in bands:
                    if band in stack:
                        stack[band].append(data[band])

                    else:
                        stack[band] = [data[band]]

                    _log.debug("data[%s] has shape [%s] and MB [%s]", band.name, numpy.shape(data[band]), data[band].nbytes/1000/1000)
                    _log.debug("stack[%s] has [%s] elements", band.name, len(stack[band]))

            # Apply summary method

            _log.info("Finished reading {count} datasets for chunk ({xmin:4d},{ymin:4d}) to ({xmax:4d},{ymax:4d}) - about to summarise them".format(count=len(tiles), xmin=x, ymin=y, xmax=x+self.chunk_size_x-1, ymax=y+self.chunk_size_y-1))
            _log.debug("Current MAX RSS  usage is [%d] MB",  resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)

            masked_stack = dict()

            for band in bands:
                masked_stack[band] = numpy.ma.masked_equal(stack[band], ndv)
                _log.debug("masked_stack[%s] is %s", band.name, masked_stack[band])
                _log.debug("masked stack[%s] has shape [%s] and MB [%s]", band.name, numpy.shape(masked_stack[band]), masked_stack[band].nbytes/1000/1000)
                _log.debug("Current MAX RSS  usage is [%d] MB",  resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)

                if self.summary_method == TimeSeriesSummaryMethod.MIN:
                    masked_summary = numpy.min(masked_stack[band], axis=0)

                elif self.summary_method == TimeSeriesSummaryMethod.MAX:
                    masked_summary = numpy.max(masked_stack[band], axis=0)

                elif self.summary_method == TimeSeriesSummaryMethod.MEAN:
                    masked_summary = numpy.mean(masked_stack[band], axis=0)

                elif self.summary_method == TimeSeriesSummaryMethod.MEDIAN:
                    masked_summary = numpy.median(masked_stack[band], axis=0)

                # aka 50th percentile (non-interpolated)

                elif self.summary_method == TimeSeriesSummaryMethod.MEDIAN_NON_INTERPOLATED:
                    masked_sorted = numpy.ma.sort(masked_stack[band], axis=0)
                    masked_percentile_index = numpy.ma.floor(numpy.ma.count(masked_sorted, axis=0) * 0.5).astype(numpy.int16)
                    masked_summary = numpy.ma.choose(masked_percentile_index, masked_sorted)

                elif self.summary_method == TimeSeriesSummaryMethod.COUNT:
                    # TODO Need to artificially create masked array here since it is being expected/filled below!!!
                    masked_summary = numpy.ma.masked_equal(masked_stack[band].count(axis=0), ndv)

                elif self.summary_method == TimeSeriesSummaryMethod.SUM:
                    masked_summary = numpy.sum(masked_stack[band], axis=0)

                elif self.summary_method == TimeSeriesSummaryMethod.STANDARD_DEVIATION:
                    masked_summary = numpy.std(masked_stack[band], axis=0)

                elif self.summary_method == TimeSeriesSummaryMethod.VARIANCE:
                    masked_summary = numpy.var(masked_stack[band], axis=0)

                # currently 95th percentile

                elif self.summary_method == TimeSeriesSummaryMethod.PERCENTILE:
                    masked_sorted = numpy.ma.sort(masked_stack[band], axis=0)
                    masked_percentile_index = numpy.ma.floor(numpy.ma.count(masked_sorted, axis=0) * 0.95).astype(numpy.int16)
                    masked_summary = numpy.ma.choose(masked_percentile_index, masked_sorted)

                elif self.summary_method == TimeSeriesSummaryMethod.YOUNGEST_PIXEL:

                    # TODO the fact that this is band at a time might be problematic.  We really should be considering
                    # all bands at once (that is what the landsat_mosaic logic did).  If PQA is being applied then
                    # it's probably all good but if not then we might get odd results....

                    masked_summary = empty_array(shape=(self.chunk_size_y, self.chunk_size_x), dtype=numpy.int16, ndv=ndv)

                    # Note the reversed as the stack is created oldest first
                    for d in reversed(stack[band]):
                        masked_summary = numpy.where(masked_summary == ndv, d, masked_summary)

                        # If the summary doesn't contain any no-data values then we can stop
                        if not numpy.any(masked_summary == ndv):
                            break

                    # TODO Need to artificially create masked array here since it is being expected/filled below!!!
                    masked_summary = numpy.ma.masked_equal(masked_summary, ndv)

                elif self.summary_method == TimeSeriesSummaryMethod.OLDEST_PIXEL:

                    # TODO the fact that this is band at a time might be problematic.  We really should be considering
                    # all bands at once (that is what the landsat_mosaic logic did).  If PQA is being applied then
                    # it's probably all good but if not then we might get odd results....

                    masked_summary = empty_array(shape=(self.chunk_size_y, self.chunk_size_x), dtype=numpy.int16, ndv=ndv)

                    # Note the NOT reversed as the stack is created oldest first
                    for d in stack[band]:
                        masked_summary = numpy.where(masked_summary == ndv, d, masked_summary)

                        # If the summary doesn't contain any no-data values then we can stop
                        if not numpy.any(masked_summary == ndv):
                            break

                    # TODO Need to artificially create masked array here since it is being expected/filled below!!!
                    masked_summary = numpy.ma.masked_equal(masked_summary, ndv)

                masked_stack[band] = None
                _log.debug("NONE-ing masked stack[%s]", band.name)
                _log.debug("Current MAX RSS  usage is [%d] MB",  resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)

                _log.debug("masked summary is [%s]", masked_summary)
                _log.debug("Current MAX RSS  usage is [%d] MB",  resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)

                # Create the output file

                if not os.path.exists(path):
                    _log.info("Creating raster [%s]", path)

                    driver = gdal.GetDriverByName("GTiff")
                    assert driver

                    raster = driver.Create(path, metadata.shape[0], metadata.shape[1], len(bands), gdal.GDT_Int16)
                    assert raster

                    raster.SetGeoTransform(metadata.transform)
                    raster.SetProjection(metadata.projection)

                    for b in bands:
                        raster.GetRasterBand(b.value).SetNoDataValue(ndv)

                _log.info("Writing band [%s] data to raster [%s]", band.name, path)
                _log.debug("Current MAX RSS  usage is [%d] MB",  resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)

                raster.GetRasterBand(band.value).WriteArray(masked_summary.filled(ndv), xoff=x, yoff=y)
                raster.GetRasterBand(band.value).ComputeStatistics(True)

                raster.FlushCache()

                masked_summary = None
                _log.debug("NONE-ing the masked summary")
                _log.debug("Current MAX RSS  usage is [%d] MB",  resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)

            stack = None
            _log.debug("Just NONE-ed the stack")
            _log.debug("Current MAX RSS  usage is [%d] MB",  resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)

        raster = None

        _log.debug("Just NONE'd the raster")
        _log.debug("Current MAX RSS  usage is [%d] MB",  resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)

        _log.info("Memory usage was [%d MB]", resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)
        _log.info("CPU time used [%s]", timedelta(seconds=int(resource.getrusage(resource.RUSAGE_SELF).ru_utime)))
Example #4
    def run(self):

        shape = (4000, 4000)

        extra = [
            ExtraBands.OBSERVATIONS, ExtraBands.LOW, ExtraBands.MEDIAN,
            ExtraBands.HIGH
        ]

        band_desc = [
            "TOTAL_OBSERVATIONS", "LOWEST TIDE", "MEDIAN TIDE", "HIGHEST TIDE"
        ]

        extra_fields = dict()
        for field in extra:
            extra_fields[field] = empty_array(shape=shape,
                                              dtype=numpy.int16,
                                              fill=0)

        metadata = None
        low_value = 0
        high_value = 0
        median_value = 0
        observations = 0
        cur = self.load_tidal_file()
        low = cur[0]
        high = cur[1]
        observations = cur[2]
        #import pdb; pdb.set_trace()
        low_value = int(float(low) * 1000)
        high_value = int(float(high) * 1000)
        median_value = (low_value + high_value) / 2
        #import pdb; pdb.set_trace()
        _log.info("\toffset file low %d high %d median %d lines %d ",
                  low_value, high_value, median_value, observations)
        for layer in extra_fields:
            lv = layer.value
            if lv == 1:
                extra_fields[layer][:] = observations
            if lv == 2:
                extra_fields[layer][:] = low_value
            if lv == 3:
                extra_fields[layer][:] = median_value
            if lv == 4:
                extra_fields[layer][:] = high_value
        transform = (self.x, 0.00025, 0.0, self.y + 1, 0.0, -0.00025)

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(4326)

        projection = srs.ExportToWkt()

        # Create the output dataset
        metadata_info = self.generate_raster_metadata()
        raster_create(self.output().path,
                      [extra_fields[field] for field in extra],
                      transform,
                      projection,
                      NDV,
                      GDT_Int16,
                      dataset_metadata=metadata_info,
                      band_ids=band_desc)
Example #5
    def run(self):

        shape = (4000, 4000)
        no_data_value = NDV

        best_pixel_fc = dict()

        for band in Fc25Bands:
            # best_pixel_fc[band] = empty_array(shape=shape, dtype=numpy.int16, ndv=INT16_MIN)
            best_pixel_fc[band] = empty_array(shape=shape, dtype=numpy.int16, ndv=NDV)

        best_pixel_nbar = dict()

        for band in Ls57Arg25Bands:
            best_pixel_nbar[band] = empty_array(shape=shape, dtype=numpy.int16, ndv=NDV)

        best_pixel_satellite = empty_array(shape=shape, dtype=numpy.int16, ndv=NDV)
        best_pixel_date = empty_array(shape=shape, dtype=numpy.int32, ndv=NDV)

        current_satellite = empty_array(shape=shape, dtype=numpy.int16, ndv=NDV)
        current_date = empty_array(shape=shape, dtype=numpy.int32, ndv=NDV)

        SATELLITE_DATA_VALUES = {Satellite.LS5: 5, Satellite.LS7: 7, Satellite.LS8: 8}

        metadata_nbar = None
        metadata_fc = None

        for tile in self.get_tiles():

            pqa = tile.datasets[DatasetType.PQ25]
            nbar = tile.datasets[DatasetType.ARG25]
            fc = tile.datasets[DatasetType.FC25]
            wofs = tile.datasets[DatasetType.WATER] if DatasetType.WATER in tile.datasets else None

            _log.info("Processing [%s]", fc.path)

            data = dict()

            # Create an initial "no mask" mask

            mask = numpy.ma.make_mask_none((4000, 4000))
            # _log.info("### mask is [%s]", mask[1000][1000])

            # Add the PQA mask if we are doing PQA masking

            if self.mask_pqa_apply:
                mask = get_mask_pqa(pqa, pqa_masks=self.mask_pqa_mask, mask=mask)
                # _log.info("### mask PQA is [%s]", mask[1000][1000])

            # Add the WOFS mask if we are doing WOFS masking

            if self.mask_wofs_apply and wofs:
                mask = get_mask_wofs(wofs, wofs_masks=self.mask_wofs_mask, mask=mask)
                # _log.info("### mask PQA is [%s]", mask[1000][1000])

            # Get NBAR dataset

            data[DatasetType.ARG25] = get_dataset_data_masked(nbar, mask=mask)
            # _log.info("### NBAR/RED is [%s]", data[DatasetType.ARG25][Ls57Arg25Bands.RED][1000][1000])

            # Get the NDVI dataset

            data[DatasetType.NDVI] = calculate_ndvi(data[DatasetType.ARG25][Ls57Arg25Bands.RED],
                                                    data[DatasetType.ARG25][Ls57Arg25Bands.NEAR_INFRARED])
            # _log.info("### NDVI is [%s]", data[DatasetType.NDVI][1000][1000])

            # Add the NDVI value range mask (to the existing mask)

            mask = self.get_mask_range(data[DatasetType.NDVI], min_val=0.0, max_val=0.3, mask=mask)
            # _log.info("### mask NDVI is [%s]", mask[1000][1000])

            # Get FC25 dataset

            data[DatasetType.FC25] = get_dataset_data_masked(fc, mask=mask)
            # _log.info("### FC/BS is [%s]", data[DatasetType.FC25][Fc25Bands.BARE_SOIL][1000][1000])

            # Add the bare soil value range mask (to the existing mask)

            mask = self.get_mask_range(data[DatasetType.FC25][Fc25Bands.BARE_SOIL], min_val=0, max_val=8000, mask=mask)
            # _log.info("### mask BS is [%s]", mask[1000][1000])

            # Apply the final mask to the FC25 bare soil data

            data_bare_soil = numpy.ma.MaskedArray(data=data[DatasetType.FC25][Fc25Bands.BARE_SOIL], mask=mask).filled(NDV)
            # _log.info("### bare soil is [%s]", data_bare_soil[1000][1000])

            # Compare the bare soil value from this dataset to the current "best" value

            best_pixel_fc[Fc25Bands.BARE_SOIL] = numpy.fmax(best_pixel_fc[Fc25Bands.BARE_SOIL], data_bare_soil)
            # _log.info("### best pixel bare soil is [%s]", best_pixel_fc[Fc25Bands.BARE_SOIL][1000][1000])

            # Now update the other best pixel datasets/bands to grab the pixels we just selected

            for band in Ls57Arg25Bands:
                best_pixel_nbar[band] = propagate_using_selected_pixel(best_pixel_fc[Fc25Bands.BARE_SOIL],
                                                                       data_bare_soil,
                                                                       data[DatasetType.ARG25][band],
                                                                       best_pixel_nbar[band])

            for band in [Fc25Bands.PHOTOSYNTHETIC_VEGETATION, Fc25Bands.NON_PHOTOSYNTHETIC_VEGETATION, Fc25Bands.UNMIXING_ERROR]:
                best_pixel_fc[band] = propagate_using_selected_pixel(best_pixel_fc[Fc25Bands.BARE_SOIL],
                                                                     data_bare_soil,
                                                                     data[DatasetType.FC25][band],
                                                                     best_pixel_fc[band])

            # And now the other "provenance" data

            # Satellite "provenance" data

            current_satellite.fill(SATELLITE_DATA_VALUES[fc.satellite])

            best_pixel_satellite = propagate_using_selected_pixel(best_pixel_fc[Fc25Bands.BARE_SOIL],
                                                                  data_bare_soil,
                                                                  current_satellite,
                                                                  best_pixel_satellite)

            # Date "provenance" data

            current_date.fill(date_to_integer(tile.end_datetime))

            best_pixel_date = propagate_using_selected_pixel(best_pixel_fc[Fc25Bands.BARE_SOIL],
                                                             data_bare_soil,
                                                             current_date,
                                                             best_pixel_date)

            # Grab the metadata from the input datasets for use later when creating the output datasets

            if not metadata_nbar:
                metadata_nbar = get_dataset_metadata(nbar)

            if not metadata_fc:
                metadata_fc = get_dataset_metadata(fc)

        # Create the output datasets

        # FC composite

        raster_create(self.get_dataset_filename("FC"),
                      [best_pixel_fc[b] for b in Fc25Bands],
                      metadata_fc.transform, metadata_fc.projection,
                      metadata_fc.bands[Fc25Bands.BARE_SOIL].no_data_value,
                      metadata_fc.bands[Fc25Bands.BARE_SOIL].data_type)

        # NBAR composite

        raster_create(self.get_dataset_filename("NBAR"),
                      [best_pixel_nbar[b] for b in Ls57Arg25Bands],
                      metadata_nbar.transform, metadata_nbar.projection,
                      metadata_nbar.bands[Ls57Arg25Bands.BLUE].no_data_value,
                      metadata_nbar.bands[Ls57Arg25Bands.BLUE].data_type)

        # Satellite "provenance" composites

        raster_create(self.get_dataset_filename("SAT"),
                      [best_pixel_satellite],
                      metadata_nbar.transform, metadata_nbar.projection, no_data_value,
                      gdal.GDT_Int16)

        # Date "provenance" composites

        raster_create(self.get_dataset_filename("DATE"),
                      [best_pixel_date],
                      metadata_nbar.transform, metadata_nbar.projection, no_data_value,
                      gdal.GDT_Int32)
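
Example #5 keeps every output aligned to the pixel that produced the running maximum bare-soil fraction: numpy.fmax updates the best bare-soil value, and propagate_using_selected_pixel then copies the matching NBAR, FC, satellite and date values across. The helper's body isn't shown here; the following is a guessed, minimal sketch of the behaviour implied by how it is called (an assumption, not the project's actual implementation).

    import numpy

    NDV = -999

    def propagate_using_selected_pixel(best, current, data, output, ndv=NDV):
        # Where the current acquisition just supplied the new best value,
        # take its companion data; otherwise keep the existing output.
        return numpy.where((best == current) & (best != ndv), data, output)

Read alongside best_pixel_fc[Fc25Bands.BARE_SOIL] = numpy.fmax(best_pixel_fc[Fc25Bands.BARE_SOIL], data_bare_soil): wherever the fmax result equals this acquisition's bare-soil value, that acquisition "won", so its NBAR bands, satellite code and date are carried into the composite as well.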
Example #6
    def doit(self):
        shape = (4000, 4000)
        no_data_value = NDV

        best_pixel_data = dict()

        # TODO
        if Satellite.LS8.value in self.satellites:
            bands = Ls8Arg25Bands
        else:
            bands = Ls57Arg25Bands

        for band in bands:
            best_pixel_data[band] = empty_array(shape=shape, dtype=numpy.int16, ndv=no_data_value)

        best_pixel_satellite = empty_array(shape=shape, dtype=numpy.int16, ndv=NDV)
        # best_pixel_epoch = empty_array(shape=shape, dtype=numpy.int32, ndv=NDV)
        best_pixel_date = empty_array(shape=shape, dtype=numpy.int32, ndv=NDV)

        current_satellite = empty_array(shape=shape, dtype=numpy.int16, ndv=NDV)
        # current_epoch = empty_array(shape=shape, dtype=numpy.int32, ndv=NDV)
        current_date = empty_array(shape=shape, dtype=numpy.int32, ndv=NDV)

        metadata = None

        SATELLITE_DATA_VALUES = {Satellite.LS5: 5, Satellite.LS7: 7, Satellite.LS8: 8}

        for tile in self.get_tiles(sort=SortType.DESC):
            # Get ARG25 dataset

            dataset = tile.datasets[DatasetType.ARG25]
            _log.info("Processing ARG tile [%s]", dataset.path)

            if not metadata:
                metadata = get_dataset_metadata(dataset)

            band_data = None

            if self.apply_pq_filter:
                band_data = get_dataset_data_with_pq(dataset, tile.datasets[DatasetType.PQ25])
            else:
                band_data = get_dataset_data(dataset)

            # Create the provenance datasets

            # NOTE: need to do this BEFORE selecting the pixel since it is actually using the fact that the
            # selected pixel currently doesn't have a value

            # NOTE: band values are propagated "as a job lot" so can just check any band

            # TODO better way than just saying....RED....?
            band = bands.RED

            # Satellite

            current_satellite.fill(SATELLITE_DATA_VALUES[dataset.satellite])
            best_pixel_satellite = numpy.where(best_pixel_data[band] == no_data_value, current_satellite, best_pixel_satellite)

            # # Epoch dataset
            #
            # current_epoch.fill(calendar.timegm(tile.end_datetime.timetuple()))
            # best_pixel_epoch = numpy.where(best_pixel_data[band] == no_data_value, current_epoch, best_pixel_epoch)

            # Date dataset (20150101)

            current_date.fill(tile.end_datetime.year * 10000 + tile.end_datetime.month * 100 + tile.end_datetime.day)
            best_pixel_date = numpy.where(best_pixel_data[band] == no_data_value, current_date, best_pixel_date)

            for band in bands:
                data = band_data[band]
                # _log.debug("data = \n%s", data)

                # Replace any NO DATA best pixels with data pixels
                # TODO should I explicitly do the AND data is not NO DATA VALUE?
                best_pixel_data[band] = numpy.where(best_pixel_data[band] == no_data_value, data, best_pixel_data[band])
                # _log.debug("best pixel = \n%s", best_pixel_data[band])

            still_no_data = numpy.any(numpy.array([best_pixel_data[b] for b in bands]) == no_data_value)
            # _log.debug("still no data pixels = %s", still_no_data)

            if not still_no_data:
                break

        # Now want to mask out values in the provenance datasets if we haven't actually got a value

        # TODO better way than just saying....RED....?
        band = bands.RED

        mask = numpy.ma.masked_equal(best_pixel_data[band], NDV).mask

        best_pixel_satellite = numpy.ma.array(best_pixel_satellite, mask=mask).filled(NDV)
        # best_pixel_epoch = numpy.ma.array(best_pixel_epoch, mask=mask).fill(NDV)
        best_pixel_date = numpy.ma.array(best_pixel_date, mask=mask).filled(NDV)

        # Composite NBAR dataset

        raster_create(self.get_output_path("NBAR"), [best_pixel_data[b] for b in bands],
                      metadata.transform, metadata.projection, NDV, gdal.GDT_Int16)

        # Provenance (satellite) dataset

        raster_create(self.get_output_path("SAT"),
                      [best_pixel_satellite],
                      metadata.transform, metadata.projection, no_data_value,
                      gdal.GDT_Int16)

        # # Provenance (epoch) dataset
        #
        # raster_create(self.get_output_path("EPOCH"),
        #               [best_pixel_epoch],
        #               metadata.transform, metadata.projection, no_data_value,
        #               gdal.GDT_Int32)

        # Provenance (day of month) dataset

        raster_create(self.get_output_path("DATE"),
                      [best_pixel_date],
                      metadata.transform, metadata.projection, no_data_value,
                      gdal.GDT_Int32)
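
The DATE provenance band in Example #6 packs the acquisition date into a single int32 as YYYYMMDD (e.g. 20150101, as the comment notes), which is presumably also what the date_to_integer helper in Example #5 does. A small round-trip sketch of that encoding:

    from datetime import date

    def date_to_integer(d):
        # 2015-01-01 -> 20150101, which fits comfortably in an int32 band.
        return d.year * 10000 + d.month * 100 + d.day

    def integer_to_date(value):
        return date(value // 10000, (value // 100) % 100, value % 100)

    encoded = date_to_integer(date(2015, 1, 1))
    print(encoded)                   # 20150101
    print(integer_to_date(encoded))  # 2015-01-01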
Example #7
    def doit(self):

        _log.debug("Bare Soil Cell Task - doit()")
        shape = (4000, 4000)
        no_data_value = NDV

        best_pixel_fc = dict()

        for band in Fc25Bands:
            best_pixel_fc[band] = empty_array(shape=shape, dtype=numpy.int16, ndv=INT16_MIN)

        best_pixel_nbar = dict()

        for band in Ls57Arg25Bands:
            best_pixel_nbar[band] = empty_array(shape=shape, dtype=numpy.int16, ndv=NDV)

        best_pixel_satellite = empty_array(shape=shape, dtype=numpy.int16, ndv=NDV)
        best_pixel_year = empty_array(shape=shape, dtype=numpy.int16, ndv=NDV)
        best_pixel_month = empty_array(shape=shape, dtype=numpy.int16, ndv=NDV)
        best_pixel_epoch = empty_array(shape=shape, dtype=numpy.int32, ndv=NDV)

        current_satellite = empty_array(shape=shape, dtype=numpy.int16, ndv=NDV)
        current_year = empty_array(shape=shape, dtype=numpy.int16, ndv=NDV)
        current_month = empty_array(shape=shape, dtype=numpy.int16, ndv=NDV)
        current_epoch = empty_array(shape=shape, dtype=numpy.int32, ndv=NDV)

        SATELLITE_DATA_VALUES = {Satellite.LS5: 5, Satellite.LS7: 7, Satellite.LS8: 8}

        metadata_nbar = None
        metadata_fc = None

        for tile in self.get_tiles():
            # Get the PQ mask

            pq = tile.datasets[DatasetType.PQ25]
            data_pq = get_dataset_data(pq, [Pq25Bands.PQ])[Pq25Bands.PQ]

            mask_pq = get_pq_mask(data_pq)

            # Get NBAR dataset

            nbar = tile.datasets[DatasetType.ARG25]
            _log.info("Processing NBAR tile [%s]", nbar.path)

            if not metadata_nbar:
                metadata_nbar = get_dataset_metadata(nbar)

            data_nbar = get_dataset_data_with_pq(nbar, Ls57Arg25Bands, tile.datasets[DatasetType.PQ25])

            # Get the NDVI mask

            red = data_nbar[Ls57Arg25Bands.RED]
            nir = data_nbar[Ls57Arg25Bands.NEAR_INFRARED]

            ndvi_data = calculate_ndvi(red, nir)

            ndvi_data = numpy.ma.masked_equal(ndvi_data, NDV)
            ndvi_data = numpy.ma.masked_outside(ndvi_data, 0, 0.3, copy=False)

            mask_ndvi = ndvi_data.mask

            # Get FC25 dataset

            fc = tile.datasets[DatasetType.FC25]
            _log.info("Processing FC tile [%s]", fc.path)

            if not metadata_fc:
                metadata_fc = get_dataset_metadata(fc)

            _log.debug("metadata fc is %s", metadata_fc)

            data_fc = get_dataset_data(fc, Fc25Bands)

            data_bare_soil = data_fc[Fc25Bands.BS]
            data_bare_soil = numpy.ma.masked_equal(data_bare_soil, NDV)
            data_bare_soil = numpy.ma.masked_outside(data_bare_soil, 0, 8000)
            data_bare_soil.mask = (data_bare_soil.mask | mask_pq | mask_ndvi)
            data_bare_soil = data_bare_soil.filled(NDV)

            # Compare the bare soil value from this dataset to the current "best" value
            best_pixel_fc[Fc25Bands.BS] = numpy.fmax(best_pixel_fc[Fc25Bands.BS], data_bare_soil)

            # Now update the other best pixel datasets/bands to grab the pixels we just selected

            for band in Ls57Arg25Bands:
                best_pixel_nbar[band] = propagate_using_selected_pixel(best_pixel_fc[Fc25Bands.BS],
                                                                       data_bare_soil,
                                                                       data_nbar[band],
                                                                       best_pixel_nbar[band])

            for band in [Fc25Bands.PV, Fc25Bands.NPV, Fc25Bands.ERROR]:
                best_pixel_fc[band] = propagate_using_selected_pixel(best_pixel_fc[Fc25Bands.BS],
                                                                     data_bare_soil,
                                                                     data_fc[band],
                                                                     best_pixel_fc[band])

            # And now the other "provenance" data

            current_satellite.fill(SATELLITE_DATA_VALUES[fc.satellite])

            best_pixel_satellite = propagate_using_selected_pixel(best_pixel_fc[Fc25Bands.BS],
                                                                  data_bare_soil,
                                                                  current_satellite,
                                                                  best_pixel_satellite)

            current_year.fill(tile.end_datetime_year)

            best_pixel_year = propagate_using_selected_pixel(best_pixel_fc[Fc25Bands.BS],
                                                             data_bare_soil,
                                                             current_year,
                                                             best_pixel_year)

            current_month.fill(tile.end_datetime_month)

            best_pixel_month = propagate_using_selected_pixel(best_pixel_fc[Fc25Bands.BS],
                                                              data_bare_soil,
                                                              current_month,
                                                              best_pixel_month)

            current_epoch.fill(calendar.timegm(tile.end_datetime.timetuple()))

            best_pixel_epoch = propagate_using_selected_pixel(best_pixel_fc[Fc25Bands.BS],
                                                              data_bare_soil,
                                                              current_epoch,
                                                              best_pixel_epoch)

        # Create the output datasets

        # FC composite

        raster_create(self.get_dataset_filename("FC"),
                      [best_pixel_fc[b] for b in Fc25Bands],
                      metadata_fc.transform, metadata_fc.projection, metadata_fc.bands[Fc25Bands.BS].no_data_value,
                      metadata_fc.bands[Fc25Bands.BS].data_type)

        # NBAR composite

        raster_create(self.get_dataset_filename("NBAR"),
                      [best_pixel_nbar[b] for b in Ls57Arg25Bands],
                      metadata_nbar.transform, metadata_nbar.projection,
                      metadata_nbar.bands[Ls57Arg25Bands.BLUE].no_data_value,
                      metadata_nbar.bands[Ls57Arg25Bands.BLUE].data_type)

        # "Provenance" composites

        raster_create(self.get_dataset_filename("SAT"),
                      [best_pixel_satellite],
                      metadata_nbar.transform, metadata_nbar.projection, no_data_value,
                      gdal.GDT_Int16)

        raster_create(self.get_dataset_filename("YEAR"),
                      [best_pixel_year],
                      metadata_nbar.transform, metadata_nbar.projection, no_data_value,
                      gdal.GDT_Int16)

        raster_create(self.get_dataset_filename("MONTH"),
                      [best_pixel_month],
                      metadata_nbar.transform, metadata_nbar.projection, no_data_value,
                      gdal.GDT_Int16)

        raster_create(self.get_dataset_filename("EPOCH"),
                      [best_pixel_epoch],
                      metadata_nbar.transform, metadata_nbar.projection, no_data_value,
                      gdal.GDT_Int32)
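
Examples #5 and #7 both screen candidate bare-soil pixels with an NDVI range of 0 to 0.3. The calculate_ndvi helper isn't reproduced above; the conventional definition it presumably implements is NDVI = (NIR - RED) / (NIR + RED), sketched below with arbitrary reflectance values (an illustration, not necessarily the project's exact implementation).

    import numpy

    def ndvi_sketch(red, nir):
        """Conventional NDVI = (NIR - RED) / (NIR + RED), computed in floating point."""
        red = red.astype(numpy.float32)
        nir = nir.astype(numpy.float32)
        with numpy.errstate(divide="ignore", invalid="ignore"):
            return (nir - red) / (nir + red)

    red = numpy.array([[500, 1200]], dtype=numpy.int16)
    nir = numpy.array([[600, 3000]], dtype=numpy.int16)
    print(ndvi_sketch(red, nir))  # approx [[0.09 0.43]] - only the first pixel would pass the 0-0.3 screen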