Example #1
def _map_distance_from_forest_edge(lulc_uri, biophysical_table_uri,
                                   edge_distance_uri):
    """Generates a raster of forest edge distances where each pixel is the
    distance to the edge of the forest in meters.

    Parameters:
        lulc_uri (string): path to the landcover raster that contains integer
            landcover codes
        biophysical_table_uri (string): a path to a csv table that indexes
            landcover codes to forest type, contains at least the fields
            'lucode' (landcover integer code) and 'is_forest' (0 or 1 depending
            on landcover code type)
        edge_distance_uri (string): path to output raster where each pixel
            contains the euclidean pixel distance to the nearest forest edge
            for all non-nodata values of lulc_uri

    Returns:
        None"""

    # Build a list of forest lucodes
    biophysical_table = pygeoprocessing.get_lookup_from_table(
        biophysical_table_uri, 'lucode')
    forest_codes = [
        lucode for (lucode, ludata) in biophysical_table.iteritems()
        if int(ludata['is_forest']) == 1
    ]

    # Make a raster where 1 is non-forest landcover types and 0 is forest
    forest_mask_nodata = 255
    lulc_nodata = pygeoprocessing.get_nodata_from_uri(lulc_uri)

    def mask_non_forest_op(lulc_array):
        """converts forest lulc codes to 1"""
        non_forest_mask = ~numpy.in1d(lulc_array.flatten(),
                                      forest_codes).reshape(lulc_array.shape)
        nodata_mask = lulc_array == lulc_nodata
        return numpy.where(nodata_mask, forest_mask_nodata, non_forest_mask)

    non_forest_mask_uri = pygeoprocessing.temporary_filename()
    out_pixel_size = pygeoprocessing.get_cell_size_from_uri(lulc_uri)
    pygeoprocessing.vectorize_datasets([lulc_uri],
                                       mask_non_forest_op,
                                       non_forest_mask_uri,
                                       gdal.GDT_Byte,
                                       forest_mask_nodata,
                                       out_pixel_size,
                                       "intersection",
                                       vectorize_op=False)

    # Do the distance transform on non-forest pixels
    pygeoprocessing.distance_transform_edt(non_forest_mask_uri,
                                           edge_distance_uri)

    # good practice to delete temporary files when we're done with them
    os.remove(non_forest_mask_uri)
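# Aside: the mask-then-distance-transform pattern above can be sketched with
# plain numpy and scipy (a minimal in-memory illustration, not the
# pygeoprocessing API; the toy grid and forest codes are assumptions):
import numpy
from scipy import ndimage

lulc = numpy.array([
    [3, 3, 3, 3],
    [3, 1, 1, 3],
    [3, 1, 2, 3],
    [3, 3, 3, 3]])
forest_codes = [1, 2]
forest_mask = numpy.in1d(lulc.ravel(), forest_codes).reshape(lulc.shape)
# scipy's distance_transform_edt gives each nonzero (forest) pixel its
# euclidean distance, in pixel units, to the nearest zero (non-forest) pixel
edge_distance = ndimage.distance_transform_edt(forest_mask)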
Example #2
    def test_raster_bad_matrix_iterable_input(self):
        """Verify TypeError raised when band_matrices not a list."""
        from pygeoprocessing.testing import create_raster_on_disk
        from pygeoprocessing.testing.sampledata import SRS_WILLAMETTE
        pixels = set([1])
        nodata = None
        reference = SRS_WILLAMETTE
        filename = pygeoprocessing.temporary_filename()
        with self.assertRaises(TypeError):
            create_raster_on_disk(
                pixels, reference.origin, reference.projection, nodata,
                reference.pixel_size(30), datatype='auto', filename=filename)
Example #3
    def test_raster_multiple_dtypes(self):
        """Verify TypeError raised when matrix band dtypes are mismatched."""
        from pygeoprocessing.testing import create_raster_on_disk
        from pygeoprocessing.testing.sampledata import SRS_WILLAMETTE
        pixels = [numpy.array([[0]], dtype=numpy.int32),
                  numpy.array([[0]], dtype=numpy.float64)]
        nodata = None
        reference = SRS_WILLAMETTE
        filename = pygeoprocessing.temporary_filename()
        with self.assertRaises(TypeError):
            create_raster_on_disk(
                pixels, reference.origin, reference.projection, nodata,
                reference.pixel_size(30), datatype='auto', filename=filename)
Example #4
    def test_raster_nodata_notset(self):
        """When nodata=None, a nodata value should not be set."""
        from pygeoprocessing.testing import create_raster_on_disk
        from pygeoprocessing.testing.sampledata import SRS_WILLAMETTE
        pixels = [numpy.array([[0]])]
        nodata = None
        reference = SRS_WILLAMETTE
        filename = pygeoprocessing.temporary_filename()
        create_raster_on_disk(
            pixels, reference.origin, reference.projection, nodata,
            reference.pixel_size(30), datatype='auto', filename=filename)

        set_nodata_value = pygeoprocessing.get_nodata_from_uri(filename)
        self.assertEqual(set_nodata_value, None)
Example #5
    def test_mismatched_bands(self):
        """When band sizes are mismatched, TypeError should be raised."""
        from pygeoprocessing.testing import create_raster_on_disk
        from pygeoprocessing.testing.sampledata import SRS_WILLAMETTE
        pixels = [
            numpy.ones((5, 5)),
            numpy.ones((4, 4)),
            numpy.ones((7, 7))
        ]
        nodata = 0
        reference = SRS_WILLAMETTE
        filename = pygeoprocessing.temporary_filename()
        with self.assertRaises(TypeError):
            create_raster_on_disk(
                pixels, reference.origin, reference.projection, nodata,
                reference.pixel_size(30), datatype='auto',
                filename=filename)
Example #6
    def test_invalid_raster_bands(self):
        """Verify an error when raster matrices not in a list."""
        from pygeoprocessing.testing import create_raster_on_disk
        from pygeoprocessing.testing.sampledata import SRS_WILLAMETTE
        pixels = numpy.ones((4, 4), numpy.uint16)
        nodata = 0
        reference = SRS_WILLAMETTE
        filename = pygeoprocessing.temporary_filename()

        # Error raised when `pixels` is not a list.  List of 2D matrices
        # expected.
        with self.assertRaises(TypeError):
            create_raster_on_disk(
                pixels, reference.origin,
                reference.projection, nodata,
                reference.pixel_size(30), datatype='auto',
                filename=filename)
Example #7
    def test_init(self):
        """Verify sampledata.raster writes the expected size and nodata."""
        pixels = numpy.ones((4, 4))
        nodata = 0
        reference = sampledata.SRS_COLOMBIA_30M
        filename = pygeoprocessing.temporary_filename()

        sampledata.raster(pixels, nodata, reference, filename)

        self.assertTrue(os.path.exists(filename))

        dataset = gdal.Open(filename)
        self.assertEqual(dataset.RasterXSize, 4)
        self.assertEqual(dataset.RasterYSize, 4)

        band = dataset.GetRasterBand(1)
        band_nodata = band.GetNoDataValue()
        self.assertEqual(band_nodata, nodata)
Example #8
    def test_vect_datasets_identity_aoi(self):
        """PGP.geoprocessing: vectorize_datasets f(x)=x with AOI."""
        pixel_matrix = numpy.ones((5, 5), numpy.int16)
        reference = sampledata.SRS_COLOMBIA
        nodata = -1
        pygeoprocessing.testing.create_raster_on_disk(
            [pixel_matrix],
            reference.origin,
            reference.projection,
            nodata,
            reference.pixel_size(30),
            filename=self.raster_filename)

        polygons = [
            Polygon([
                (reference.origin[0] + reference.pixel_size(30)[0] * 0,
                 reference.origin[1] + reference.pixel_size(30)[1] * 0),
                (reference.origin[0] + reference.pixel_size(30)[0] * 5,
                 reference.origin[1] + reference.pixel_size(30)[1] * 0),
                (reference.origin[0] + reference.pixel_size(30)[0] * 5,
                 reference.origin[1] + reference.pixel_size(30)[1] * 5),
                (reference.origin[0] + reference.pixel_size(30)[0] * 0,
                 reference.origin[1] + reference.pixel_size(30)[1] * 5),
                (reference.origin[0] + reference.pixel_size(30)[0] * 0,
                 reference.origin[1] + reference.pixel_size(30)[1] * 0),
            ]),
        ]
        pygeoprocessing.testing.create_vector_on_disk(
            polygons, reference.projection, filename=self.aoi_filename)

        out_filename = pygeoprocessing.temporary_filename()
        pygeoprocessing.vectorize_datasets([self.raster_filename],
                                           lambda x: x,
                                           out_filename,
                                           gdal.GDT_Int32,
                                           nodata,
                                           30,
                                           'intersection',
                                           aoi_uri=self.aoi_filename)

        pygeoprocessing.testing.assert_rasters_equal(self.raster_filename,
                                                     out_filename,
                                                     rel_tol=1e-9)
Example #9
    def test_raster_autodtype(self):
        """Verify automatic detection of a matrix's dtype."""
        from pygeoprocessing.testing import create_raster_on_disk
        from pygeoprocessing.testing.sampledata import SRS_COLOMBIA
        pixels = numpy.ones((4, 4), numpy.uint16)
        nodata = 0
        reference = SRS_COLOMBIA
        filename = pygeoprocessing.temporary_filename()

        create_raster_on_disk([pixels], reference.origin,
                              reference.projection,
                              nodata, reference.pixel_size(30),
                              datatype='auto',
                              filename=filename)

        dataset = gdal.Open(filename)
        band = dataset.GetRasterBand(1)
        band_dtype = band.DataType

        # numpy.uint16 should translate to gdal.GDT_UInt16
        self.assertEqual(band_dtype, gdal.GDT_UInt16)
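# Aside: the numpy-to-GDAL dtype mapping asserted above can be checked with
# GDAL's own helper (an illustrative check; whether create_raster_on_disk
# uses this helper internally is an assumption):
import numpy
from osgeo import gdal, gdal_array

assert gdal_array.NumericTypeCodeToGDALTypeCode(numpy.uint16) == gdal.GDT_UInt16
assert gdal_array.NumericTypeCodeToGDALTypeCode(numpy.float32) == gdal.GDT_Float32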
Example #10
    def test_vect_datasets_identity(self):
        """PGP.geoprocessing: vectorize_datasets f(x)=x."""
        pixel_matrix = numpy.ones((5, 5), numpy.int16)
        reference = sampledata.SRS_COLOMBIA
        nodata = -1
        pygeoprocessing.testing.create_raster_on_disk(
            [pixel_matrix],
            reference.origin,
            reference.projection,
            nodata,
            reference.pixel_size(30),
            filename=self.raster_filename)

        out_filename = pygeoprocessing.temporary_filename()
        pygeoprocessing.vectorize_datasets([self.raster_filename], lambda x: x,
                                           out_filename, gdal.GDT_Int32,
                                           nodata, 30, 'intersection')

        pygeoprocessing.testing.assert_rasters_equal(self.raster_filename,
                                                     out_filename,
                                                     rel_tol=1e-9)
Example #11
    def test_vect_datasets_(self):
        """PGP.geoprocessing: vect..._datasets expected error for non-list."""
        pixel_matrix = numpy.ones((5, 5), numpy.int16)
        reference = sampledata.SRS_COLOMBIA
        nodata = -1
        pygeoprocessing.testing.create_raster_on_disk(
            [pixel_matrix],
            reference.origin,
            reference.projection,
            nodata,
            reference.pixel_size(30),
            filename=self.raster_filename)

        out_filename = pygeoprocessing.temporary_filename()
        with self.assertRaises(ValueError):
            # intentionally passing a filename rather than a list of files
            # to get an expected exception
            pygeoprocessing.vectorize_datasets(self.raster_filename,
                                               lambda x: x, out_filename,
                                               gdal.GDT_Int32, nodata, 30,
                                               'intersection')
Example #12
    def test_multi_bands(self):
        """Verify that we can create multi-band rasters."""
        from pygeoprocessing.testing import create_raster_on_disk
        from pygeoprocessing.testing.sampledata import SRS_WILLAMETTE
        pixels = [
            numpy.ones((5, 5)),
            numpy.zeros((5, 5)),
            numpy.multiply(numpy.ones((5, 5)), 3),
        ]
        nodata = 0
        reference = SRS_WILLAMETTE
        filename = pygeoprocessing.temporary_filename()

        create_raster_on_disk(pixels, reference.origin,
                              reference.projection, nodata,
                              reference.pixel_size(30),
                              datatype='auto', filename=filename)

        # check that the three bands have been written properly.
        dataset = gdal.Open(filename)
        for band_num, input_matrix in zip(range(1, 4), pixels):
            band = dataset.GetRasterBand(band_num)
            written_matrix = band.ReadAsArray()
            numpy.testing.assert_almost_equal(input_matrix, written_matrix)
Example #13
def _convert_landscape(
        base_lulc_uri, replacement_lucode, area_to_convert,
        focal_landcover_codes, convertible_type_list, score_weight, n_steps,
        smooth_distance_from_edge_uri, output_landscape_raster_uri,
        stats_uri):
    """Expand replacement lucodes in relation to the focal lucodes.

    If the sign of `score_weight` is positive, expansion marches away from
    the focal types; if it is negative, conversion marches toward the focal
    types.

    Parameters:
        base_lulc_uri (string): path to the landcover raster used as the
            base landcover map for agricultural conversion
        replacement_lucode (int): agriculture landcover code type found in the
            raster at `base_lulc_uri`
        area_to_convert (float): area (Ha) to convert to agriculture
        focal_landcover_codes (list of int): landcover codes that are used to
            calculate proximity
        convertible_type_list (list of int): landcover codes that are allowable
            to be converted to agriculture
        score_weight (float): this value is used to multiply the distance from
            the focal landcover types when prioritizing which pixels in
            `convertible_type_list` are to be converted.  If negative,
            conversion occurs toward the focal types; if positive, away
            from the focal types.
        n_steps (int): number of steps to convert the landscape.  On each step
            the distance transform will be applied on the
            current value of the `focal_landcover_codes` pixels in
            `output_landscape_raster_uri`.  On the first step the distance
            is calculated from `base_lulc_uri`.
        smooth_distance_from_edge_uri (string): an intermediate output showing
            the pixel distance from the edge of the base landcover types
        output_landscape_raster_uri (string): an output raster that will
            contain the final fragmented forest layer.
        stats_uri (string): a path to an output csv that records the number,
            type, and area of pixels converted in `output_landscape_raster_uri`

    Returns:
        None.
    """
    tmp_file_registry = {
        'non_base_mask': pygeoprocessing.temporary_filename(),
        'base_mask': pygeoprocessing.temporary_filename(),
        'gaussian_kernel': pygeoprocessing.temporary_filename(),
        'distance_from_base_mask_edge': pygeoprocessing.temporary_filename(),
        'distance_from_non_base_mask_edge':
            pygeoprocessing.temporary_filename(),
        'convertible_distances': pygeoprocessing.temporary_filename(),
        'smooth_distance_from_edge': pygeoprocessing.temporary_filename(),
        'distance_from_edge': pygeoprocessing.temporary_filename(),
    }
    # a sigma of 1.0 (one pixel unit) gives nice visual results when
    # smoothing pixel-level artifacts
    _make_gaussian_kernel_uri(1.0, tmp_file_registry['gaussian_kernel'])

    # create the output raster first as a copy of the base landcover so it can
    # be looped on for each step
    lulc_nodata = pygeoprocessing.get_nodata_from_uri(base_lulc_uri)
    pixel_size_out = pygeoprocessing.get_cell_size_from_uri(base_lulc_uri)
    mask_nodata = 2
    pygeoprocessing.vectorize_datasets(
        [base_lulc_uri], lambda x: x, output_landscape_raster_uri,
        gdal.GDT_Int32, lulc_nodata, pixel_size_out, "intersection",
        vectorize_op=False, datasets_are_pre_aligned=True)

    # convert everything furthest from edge for each of n_steps
    pixel_area_ha = (
            pygeoprocessing.get_cell_size_from_uri(base_lulc_uri)**2 / 10000.0)
    max_pixels_to_convert = int(math.ceil(area_to_convert / pixel_area_ha))
    convertible_type_nodata = -1
    pixels_left_to_convert = max_pixels_to_convert
    pixels_to_convert = max_pixels_to_convert / n_steps
    stats_cache = collections.defaultdict(int)

    # pylint complains when these are defined inside the loop
    invert_mask = None
    distance_nodata = None

    for step_index in xrange(n_steps):
        LOGGER.info('step %d of %d', step_index+1, n_steps)
        pixels_left_to_convert -= pixels_to_convert

        # The last step often overshoots the number of pixels to convert;
        # this check trims it back to the exact amount.
        if pixels_left_to_convert < 0:
            pixels_to_convert += pixels_left_to_convert

        # create distance transforms for inside and outside the base lulc codes
        LOGGER.info('create distance transform for current landcover')
        for invert_mask, mask_id, distance_id in [
            (False, 'non_base_mask', 'distance_from_non_base_mask_edge'),
            (True, 'base_mask', 'distance_from_base_mask_edge')]:

            def _mask_base_op(lulc_array):
                """Create a mask of valid non-base pixels only."""
                base_mask = numpy.in1d(
                    lulc_array.flatten(), focal_landcover_codes).reshape(
                    lulc_array.shape)
                if invert_mask:
                    base_mask = ~base_mask
                return numpy.where(
                    lulc_array == lulc_nodata, mask_nodata, base_mask)
            pygeoprocessing.vectorize_datasets(
                [output_landscape_raster_uri], _mask_base_op,
                tmp_file_registry[mask_id], gdal.GDT_Byte,
                mask_nodata, pixel_size_out, "intersection",
                vectorize_op=False, datasets_are_pre_aligned=True)

            # create distance transform for the current mask
            pygeoprocessing.distance_transform_edt(
                tmp_file_registry[mask_id], tmp_file_registry[distance_id])

        # combine inner and outer distance transforms into one
        distance_nodata = pygeoprocessing.get_nodata_from_uri(
            tmp_file_registry['distance_from_base_mask_edge'])

        def _combine_masks(base_distance_array, non_base_distance_array):
            """create a mask of valid non-base pixels only."""
            result = non_base_distance_array
            valid_base_mask = base_distance_array > 0.0
            result[valid_base_mask] = base_distance_array[valid_base_mask]
            return result
        pygeoprocessing.vectorize_datasets(
            [tmp_file_registry['distance_from_base_mask_edge'],
             tmp_file_registry['distance_from_non_base_mask_edge']],
            _combine_masks, tmp_file_registry['distance_from_edge'],
            gdal.GDT_Float32, distance_nodata, pixel_size_out, "intersection",
            vectorize_op=False, datasets_are_pre_aligned=True)

        # smooth the distance transform to avoid scanline artifacts
        pygeoprocessing.convolve_2d_uri(
            tmp_file_registry['distance_from_edge'],
            tmp_file_registry['gaussian_kernel'],
            smooth_distance_from_edge_uri)

        # turn inside and outside masks into a single mask
        def _mask_to_convertible_codes(distance_from_base_edge, lulc):
            """Mask out the distance transform to a set of lucodes."""
            convertible_mask = numpy.in1d(
                lulc.flatten(), convertible_type_list).reshape(lulc.shape)
            return numpy.where(
                convertible_mask, distance_from_base_edge,
                convertible_type_nodata)
        pygeoprocessing.vectorize_datasets(
            [smooth_distance_from_edge_uri, output_landscape_raster_uri],
            _mask_to_convertible_codes,
            tmp_file_registry['convertible_distances'], gdal.GDT_Float32,
            convertible_type_nodata, pixel_size_out, "intersection",
            vectorize_op=False, datasets_are_pre_aligned=True)

        LOGGER.info(
            'convert %d pixels to lucode %d', pixels_to_convert,
            replacement_lucode)
        _convert_by_score(
            tmp_file_registry['convertible_distances'], pixels_to_convert,
            output_landscape_raster_uri, replacement_lucode, stats_cache,
            score_weight)

    _log_stats(stats_cache, pixel_area_ha, stats_uri)
    for filename in tmp_file_registry.values():
        os.remove(filename)
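# Aside: `_convert_by_score` is not shown here; its core idea -- convert the
# `pixels_to_convert` best-scoring convertible pixels -- can be sketched in
# memory with numpy (a simplified stand-in with hypothetical names; the real
# implementation streams over raster blocks):
import numpy

def _pick_top_scoring_pixels(score_array, n_pixels, score_weight, nodata):
    """Return flat indices of the n_pixels best-scoring valid pixels.

    A positive score_weight favors pixels far from the focal edge, a
    negative one favors pixels near it, matching `_convert_landscape`.
    """
    scores = score_array.astype(numpy.float64) * score_weight
    scores[score_array == nodata] = -numpy.inf  # never select nodata
    flat_order = numpy.argsort(scores, axis=None)  # ascending order
    return flat_order[-n_pixels:]  # the n_pixels largest scores

# with score_weight=-1.0 the smallest valid distance wins:
# _pick_top_scoring_pixels(
#     numpy.array([[5.0, 2.0], [-1.0, 9.0]]), 1, -1.0, -1.0) -> [1]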
Example #14
def execute(args):
    """This function invokes the seasonal water yield model given
        URI inputs of files. It may write log, warning, or error messages to
        stdout.
    """

    alpha_m = float(fractions.Fraction(args['alpha_m']))
    beta_i = float(fractions.Fraction(args['beta_i']))
    gamma = float(fractions.Fraction(args['gamma']))

    try:
        file_suffix = args['results_suffix']
        if file_suffix != "" and not file_suffix.startswith('_'):
            file_suffix = '_' + file_suffix
    except KeyError:
        file_suffix = ''

    pygeoprocessing.geoprocessing.create_directories([args['workspace_dir']])

    qfi_uri = os.path.join(args['workspace_dir'], 'qf%s.tif' % file_suffix)
    cn_uri = os.path.join(args['workspace_dir'], 'cn%s.tif' % file_suffix)

    lulc_uri_aligned = pygeoprocessing.temporary_filename()
    dem_uri_aligned = pygeoprocessing.temporary_filename()

    pixel_size = pygeoprocessing.geoprocessing.get_cell_size_from_uri(
        args['lulc_uri'])

    LOGGER.info('Aligning and clipping dataset list')
    input_align_list = [args['lulc_uri'], args['dem_uri']]
    output_align_list = [lulc_uri_aligned, dem_uri_aligned]

    if not args['user_defined_recharge']:
        precip_uri_list = []
        et0_uri_list = []

        et0_dir_list = [
            os.path.join(args['et0_dir'], f) for f in os.listdir(args['et0_dir'])]
        precip_dir_list = [
            os.path.join(args['precip_dir'], f) for f in os.listdir(
                args['precip_dir'])]

        qf_monthly_uri_list = []
        for m_index in range(1, N_MONTHS + 1):
            qf_monthly_uri_list.append(
                os.path.join(
                    args['workspace_dir'], 'qf_%d%s.tif' %
                    (m_index, file_suffix)))

        for month_index in range(1, N_MONTHS + 1):
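            # matches basenames that end with the month number right before
            # the extension, e.g. 'precip_3.tif' for month 3 (the [^\d]
            # guard keeps month 1 from also matching '..._11.tif')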
            month_file_match = re.compile(r'.*[^\d]%d\.[^.]+$' % month_index)

            for data_type, dir_list, uri_list in [
                    ('et0', et0_dir_list, et0_uri_list),
                    ('Precip', precip_dir_list, precip_uri_list)]:

                file_list = [x for x in dir_list if month_file_match.match(x)]
                if len(file_list) == 0:
                    raise ValueError(
                        "No %s found for month %d" % (data_type, month_index))
                if len(file_list) > 1:
                    raise ValueError(
                        "Ambiguous set of files found for month %d: %s" %
                        (month_index, file_list))
                uri_list.append(file_list[0])

        soil_group_uri_aligned = pygeoprocessing.temporary_filename()

        # pre-align all the datasets
        precip_uri_aligned_list = [
            pygeoprocessing.geoprocessing.temporary_filename() for _ in
            range(len(precip_uri_list))]
        et0_uri_aligned_list = [
            pygeoprocessing.geoprocessing.temporary_filename() for _ in
            range(len(precip_uri_list))]
        input_align_list = (
            precip_uri_list + [args['soil_group_uri']] + et0_uri_list +
            input_align_list)
        output_align_list = (
            precip_uri_aligned_list + [soil_group_uri_aligned] +
            et0_uri_aligned_list + output_align_list)

    interpolate_list = ['nearest'] * len(input_align_list)
    align_index = 0
    if args['user_defined_recharge']:
        input_align_list.append(args['recharge_uri'])
        recharge_aligned_uri = (
            pygeoprocessing.geoprocessing.temporary_filename())
        output_align_list.append(recharge_aligned_uri)
        interpolate_list.append('nearest')
        align_index = len(interpolate_list) - 1

    pygeoprocessing.geoprocessing.align_dataset_list(
        input_align_list, output_align_list,
        interpolate_list,
        pixel_size, 'intersection', align_index, aoi_uri=args['aoi_uri'],
        assert_datasets_projected=True)

    flow_dir_uri = os.path.join(
        args['workspace_dir'], 'flow_dir%s.tif' % file_suffix)
    LOGGER.info('calc flow direction')
    pygeoprocessing.routing.flow_direction_d_inf(dem_uri_aligned, flow_dir_uri)

    flow_accum_uri = os.path.join(
        args['workspace_dir'], 'flow_accum%s.tif' % file_suffix)
    LOGGER.info('calc flow accumulation')
    pygeoprocessing.routing.flow_accumulation(
        flow_dir_uri, dem_uri_aligned, flow_accum_uri)
    stream_uri = os.path.join(
        args['workspace_dir'], 'stream%s.tif' % file_suffix)
    threshold_flow_accumulation = 1000
    pygeoprocessing.routing.stream_threshold(
        flow_accum_uri, threshold_flow_accumulation, stream_uri)

    LOGGER.info('calculating flow weights')
    outflow_weights_uri = os.path.join(
        args['workspace_dir'], 'outflow_weights%s.tif' % file_suffix)
    outflow_direction_uri = os.path.join(
        args['workspace_dir'], 'outflow_direction%s.tif' % file_suffix)
    seasonal_water_yield_core.calculate_flow_weights(
        flow_dir_uri, outflow_weights_uri, outflow_direction_uri)


    si_uri = os.path.join(args['workspace_dir'], 'si%s.tif' % file_suffix)

    biophysical_table = pygeoprocessing.geoprocessing.get_lookup_from_table(
        args['biophysical_table_uri'], 'lucode')

    kc_lookup = dict([
        (lucode, biophysical_table[lucode]['kc']) for lucode in
        biophysical_table])

    recharge_avail_uri = os.path.join(
        args['workspace_dir'], 'recharge_avail%s.tif' % file_suffix)
    r_sum_avail_uri = os.path.join(
        args['workspace_dir'], 'r_sum_avail%s.tif' % file_suffix)
    vri_uri = os.path.join(args['workspace_dir'], 'vri%s.tif' % file_suffix)
    aet_uri = os.path.join(args['workspace_dir'], 'aet%s.tif' % file_suffix)

    r_sum_avail_pour_uri = os.path.join(
        args['workspace_dir'], 'r_sum_avail_pour%s.tif' % file_suffix)
    sf_uri = os.path.join(
        args['workspace_dir'], 'sf%s.tif' % file_suffix)
    sf_down_uri = os.path.join(
        args['workspace_dir'], 'sf_down%s.tif' % file_suffix)
    qb_out_uri = os.path.join(
        args['workspace_dir'], 'qb%s.txt' % file_suffix)

    LOGGER.info('classifying kc')
    kc_uri = os.path.join(args['workspace_dir'], 'kc%s.tif' % file_suffix)
    pygeoprocessing.geoprocessing.reclassify_dataset_uri(
        lulc_uri_aligned, kc_lookup, kc_uri, gdal.GDT_Float32, -1)

    LOGGER.info('calculate slow flow')
    if not args['user_defined_recharge']:
        LOGGER.info('loading number of monthly events')
        rain_events_lookup = (
            pygeoprocessing.geoprocessing.get_lookup_from_table(
                args['rain_events_table_uri'], 'month'))
        n_events = dict([
            (month, rain_events_lookup[month]['events'])
            for month in rain_events_lookup])

        LOGGER.info('calculating curve number')
        soil_nodata = pygeoprocessing.get_nodata_from_uri(
            args['soil_group_uri'])
        map_soil_type_to_header = {
            1: 'cn_a',
            2: 'cn_b',
            3: 'cn_c',
            4: 'cn_d',
        }
        cn_nodata = -1
        lulc_to_soil = {}
        lulc_nodata = pygeoprocessing.get_nodata_from_uri(lulc_uri_aligned)
        for soil_id, soil_column in map_soil_type_to_header.iteritems():
            lulc_to_soil[soil_id] = {
                'lulc_values': [],
                'cn_values': []
            }
            for lucode in sorted(biophysical_table.keys() + [lulc_nodata]):
                try:
                    lulc_to_soil[soil_id]['cn_values'].append(
                        biophysical_table[lucode][soil_column])
                    lulc_to_soil[soil_id]['lulc_values'].append(lucode)
                except KeyError:
                    if lucode == lulc_nodata:
                        lulc_to_soil[soil_id]['lulc_values'].append(lucode)
                        lulc_to_soil[soil_id]['cn_values'].append(cn_nodata)
                    else:
                        raise
            lulc_to_soil[soil_id]['lulc_values'] = (
                numpy.array(lulc_to_soil[soil_id]['lulc_values'],
                        dtype=numpy.int32))
            lulc_to_soil[soil_id]['cn_values'] = (
                numpy.array(lulc_to_soil[soil_id]['cn_values'],
                        dtype=numpy.float32))

        def cn_op(lulc_array, soil_group_array):
            """map lulc code and soil to a curve number"""
            cn_result = numpy.empty(lulc_array.shape)
            cn_result[:] = cn_nodata
            for soil_group_id in numpy.unique(soil_group_array):
                if soil_group_id == soil_nodata:
                    continue
                current_soil_mask = (soil_group_array == soil_group_id)
                index = numpy.digitize(
                    lulc_array.ravel(),
                    lulc_to_soil[soil_group_id]['lulc_values'], right=True)
                cn_values = (
                    lulc_to_soil[soil_group_id]['cn_values'][index]).reshape(
                        lulc_array.shape)
                cn_result[current_soil_mask] = cn_values[current_soil_mask]
            return cn_result

        pygeoprocessing.vectorize_datasets(
            [lulc_uri_aligned, soil_group_uri_aligned], cn_op, cn_uri,
            gdal.GDT_Float32, cn_nodata, pixel_size, 'intersection',
            vectorize_op=False, datasets_are_pre_aligned=True)

        LOGGER.info('calculate quick flow')
        calculate_quick_flow(
            precip_uri_aligned_list,
            lulc_uri_aligned, cn_uri, n_events, stream_uri, qfi_uri,
            qf_monthly_uri_list, si_uri)

        recharge_uri = os.path.join(
            args['workspace_dir'], 'recharge%s.tif' % file_suffix)
        seasonal_water_yield_core.calculate_recharge(
            precip_uri_aligned_list, et0_uri_aligned_list, qf_monthly_uri_list,
            flow_dir_uri, outflow_weights_uri, outflow_direction_uri,
            dem_uri_aligned, lulc_uri_aligned, kc_lookup, alpha_m, beta_i,
            gamma, stream_uri, recharge_uri, recharge_avail_uri,
            r_sum_avail_uri, aet_uri, kc_uri)
    else:
        recharge_uri = recharge_aligned_uri
        recharge_nodata = pygeoprocessing.geoprocessing.get_nodata_from_uri(
            recharge_uri)
        def calc_recharge_avail(recharge_array):
            recharge_threshold = recharge_array * gamma
            recharge_threshold[recharge_threshold < 0] = 0.0
            return numpy.where(
                recharge_array != recharge_nodata,
                recharge_threshold, recharge_nodata)

        #calc recharge avail
        pygeoprocessing.geoprocessing.vectorize_datasets(
            [recharge_aligned_uri], calc_recharge_avail, recharge_avail_uri,
            gdal.GDT_Float32, recharge_nodata, pixel_size, 'intersection',
            vectorize_op=False, datasets_are_pre_aligned=True)
        #calc r_sum_avail with flux accumulation
        loss_uri = pygeoprocessing.geoprocessing.temporary_filename()
        zero_absorption_source_uri = (
            pygeoprocessing.geoprocessing.temporary_filename())
        pygeoprocessing.make_constant_raster_from_base_uri(
            dem_uri_aligned, 0.0, zero_absorption_source_uri)

        pygeoprocessing.routing.route_flux(
            flow_dir_uri, dem_uri_aligned, recharge_avail_uri,
            zero_absorption_source_uri, loss_uri, r_sum_avail_uri, 'flux_only',
            include_source=False)

    # calculate Qb as the sum of recharge_avail over the aoi
    qb_results = pygeoprocessing.geoprocessing.aggregate_raster_values_uri(
        recharge_avail_uri, args['aoi_uri'])

    # 9999 is the value used to index fields if no shapefile ID is provided
    qb_result = qb_results.total[9999] / qb_results.n_pixels[9999]
    with open(qb_out_uri, 'w') as qb_file:
        qb_file.write("%f\n" % qb_result)
    LOGGER.info("Qb = %f", qb_result)

    pixel_size = pygeoprocessing.geoprocessing.get_cell_size_from_uri(
        recharge_uri)
    ri_nodata = pygeoprocessing.geoprocessing.get_nodata_from_uri(recharge_uri)

    def vri_op(ri_array):
        """Calculate the Vri index: per-pixel recharge relative to Qb."""
        return numpy.where(
            ri_array != ri_nodata, ri_array / qb_result, ri_nodata)

    pygeoprocessing.geoprocessing.vectorize_datasets(
        [recharge_uri], vri_op, vri_uri,
        gdal.GDT_Float32, ri_nodata, pixel_size, 'intersection',
        vectorize_op=False, datasets_are_pre_aligned=True)

    LOGGER.info('calculating r_sum_avail_pour')
    seasonal_water_yield_core.calculate_r_sum_avail_pour(
        r_sum_avail_uri, outflow_weights_uri, outflow_direction_uri,
        r_sum_avail_pour_uri)

    LOGGER.info('calculating slow flow')
    LOGGER.debug(
        'route_sf inputs: %s', [
            dem_uri_aligned, recharge_avail_uri, r_sum_avail_uri,
            r_sum_avail_pour_uri, outflow_direction_uri, outflow_weights_uri,
            stream_uri, sf_uri, sf_down_uri])

    seasonal_water_yield_core.route_sf(
        dem_uri_aligned, recharge_avail_uri, r_sum_avail_uri,
        r_sum_avail_pour_uri, outflow_direction_uri, outflow_weights_uri,
        stream_uri, sf_uri, sf_down_uri)

    LOGGER.info('  (\\w/)  SWY Complete!')
    LOGGER.info('  (..  \\ ')
    LOGGER.info(' _/  )  \\______')
    LOGGER.info('(oo /\'\\        )`,')
    LOGGER.info(' `--\' (v  __( / ||')
    LOGGER.info('       |||  ||| ||')
    LOGGER.info('      //_| //_|')
Example #15
    def setUp(self):
        """Predefine filename as something temporary."""
        self.raster_filename = pygeoprocessing.temporary_filename()
        self.aoi_filename = pygeoprocessing.temporary_filename()
        os.remove(self.aoi_filename)
Example #16
def execute(args):
    """Main entry point for proximity based scenario generator model.

    Parameters:
        args['workspace_dir'] (string): output directory for intermediate,
            temporary, and final files
        args['results_suffix'] (string): (optional) string to append to any
            output files
        args['base_lulc_uri'] (string): path to the base landcover map
        args['replacment_lucode'] (string or int): code to replace when
            converting pixels
        args['area_to_convert'] (string or float): max area (Ha) to convert
        args['focal_landcover_codes'] (string): a space separated string of
            landcover codes used to determine proximity when converting
            "toward" or "away from" the base landcover codes
        args['convertible_landcover_codes'] (string): a space separated string
            of landcover codes that can be converted in the generation phase
            found in `args['base_lulc_uri']`.
        args['n_fragmentation_steps'] (string): an int as a string indicating
            the number of steps to take for the fragmentation conversion
        args['aoi_uri'] (string): (optional) path to a shapefile that indicates
            an area of interest.  If present, the expansion scenario operates
            only under that AOI and the output raster is clipped to that shape.
        args['convert_farthest_from_edge'] (boolean): if True, run the
            conversion simulation starting from the pixels farthest from the
            edge and working inward.  The workspace will contain output files
            named 'farthest_from_edge{suffix}.{tif,csv}'.
        args['convert_nearest_to_edge'] (boolean): if True, run the
            conversion simulation starting from the pixels nearest to the
            edge and working inward.  The workspace will contain output files
            named 'nearest_to_edge{suffix}.{tif,csv}'.

    Returns:
        None.
    """

    if (not args['convert_farthest_from_edge']
            and not args['convert_nearest_to_edge']):
        raise ValueError("Neither scenario was selected.")

    # append a _ to the suffix if it's not empty and doesn't already have one
    try:
        file_suffix = args['results_suffix']
        if file_suffix != "" and not file_suffix.startswith('_'):
            file_suffix = '_' + file_suffix
    except KeyError:
        file_suffix = ''

    #create working directories
    output_dir = os.path.join(args['workspace_dir'])
    intermediate_dir = os.path.join(args['workspace_dir'],
                                    'intermediate_outputs')
    tmp_dir = os.path.join(args['workspace_dir'], 'tmp')
    pygeoprocessing.geoprocessing.create_directories(
        [output_dir, intermediate_dir, tmp_dir])

    area_to_convert = float(args['area_to_convert'])
    replacement_lucode = int(args['replacment_lucode'])

    # convert all the input strings to lists of ints
    convertible_type_list = numpy.array(
        [int(x) for x in args['convertible_landcover_codes'].split()])
    focal_landcover_codes = numpy.array(
        [int(x) for x in args['focal_landcover_codes'].split()])

    if 'aoi_uri' in args and args['aoi_uri'] != '':
        #clip base lulc to a new raster
        base_lulc_uri = pygeoprocessing.temporary_filename()
        pygeoprocessing.clip_dataset_uri(args['base_lulc_uri'],
                                         args['aoi_uri'],
                                         base_lulc_uri,
                                         assert_projections=True,
                                         all_touched=False)
    else:
        base_lulc_uri = args['base_lulc_uri']

    scenarios = [(args['convert_farthest_from_edge'], 'farthest_from_edge',
                  -1.0),
                 (args['convert_nearest_to_edge'], 'nearest_to_edge', 1.0)]

    for scenario_enabled, basename, score_weight in scenarios:
        if not scenario_enabled:
            continue
        LOGGER.info('executing %s scenario', basename)
        output_landscape_raster_uri = os.path.join(
            output_dir, basename + file_suffix + '.tif')
        stats_uri = os.path.join(output_dir, basename + file_suffix + '.csv')
        distance_from_edge_uri = os.path.join(
            intermediate_dir, basename + '_distance' + file_suffix + '.tif')
        _convert_landscape(base_lulc_uri, replacement_lucode, area_to_convert,
                           focal_landcover_codes, convertible_type_list,
                           score_weight, int(args['n_fragmentation_steps']),
                           distance_from_edge_uri, output_landscape_raster_uri,
                           stats_uri)
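
# Aside: a hypothetical invocation of this entry point (all paths and values
# below are illustrative assumptions, not shipped defaults):
if __name__ == '__main__':
    execute({
        'workspace_dir': 'scenario_workspace',
        'results_suffix': 'demo',
        'base_lulc_uri': 'lulc.tif',
        'replacment_lucode': '12',  # note: key spelling matches the API above
        'area_to_convert': '10000.0',  # hectares
        'focal_landcover_codes': '1 2 3',
        'convertible_landcover_codes': '4 5 6',
        'n_fragmentation_steps': '1',
        'aoi_uri': 'aoi.shp',
        'convert_farthest_from_edge': True,
        'convert_nearest_to_edge': False,
    })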