Code Example #1
File: test_stormwater.py  Project: natcap/invest
def to_raster(array,
              path,
              nodata=-1,
              pixel_size=(20, -20),
              origin=(0, 0),
              epsg=3857,
              raster_driver_creation_tuple=opts_tuple):
    """Wrap around pygeoprocessing.numpy_array_to_raster to set defaults.

    Sets some reasonable defaults for ``numpy_array_to_raster`` and takes care
    of setting up a WKT spatial reference so that it can be done in one line.

    Args:
        array (numpy.ndarray): array to be written to ``path`` as a raster
        path (str): raster path to write ``array`` to
        nodata (float): nodata value to pass to ``numpy_array_to_raster``
        pixel_size (tuple(float, float)): pixel size value to pass to
            ``numpy_array_to_raster``
        origin (tuple(float, float)): origin value to pass to
            ``numpy_array_to_raster``
        epsg (int): EPSG code used to instantiate a spatial reference that is
            passed to ``numpy_array_to_raster`` in WKT format
        raster_driver_creation_tuple (tuple): a tuple containing a GDAL driver
            name string as the first element and a GDAL creation options
            tuple/list as the second.

    Returns:
        None
    """
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(epsg)
    projection_wkt = srs.ExportToWkt()
    pygeoprocessing.numpy_array_to_raster(array, nodata, pixel_size, origin,
                                          projection_wkt, path)
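For reference, a minimal usage sketch of this helper (the array, workspace directory, and filename below are hypothetical; the numpy, os, osr, and pygeoprocessing imports are assumed from the test module):

example_array = numpy.array([[0, 1], [2, 3]], dtype=numpy.float32)
example_path = os.path.join(workspace_dir, 'example.tif')  # hypothetical workspace_dir
# Writes example.tif in EPSG:3857 using the module's default pixel size and origin.
to_raster(example_array, example_path, nodata=-1, epsg=3857)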
Code Example #2
    def test_transition_table(self):
        """CBC Preprocessor: Test creation of transition table."""
        from natcap.invest.coastal_blue_carbon import preprocessor

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(3157)
        projection_wkt = srs.ExportToWkt()
        origin = (443723.127327877911739, 4956546.905980412848294)
        matrix_a = numpy.array([[0, 1], [0, 1], [0, 1]], dtype=numpy.int16)
        filename_a = os.path.join(self.workspace_dir, 'raster_a.tif')
        pygeoprocessing.numpy_array_to_raster(matrix_a, -1, (100, -100),
                                              origin, projection_wkt,
                                              filename_a)

        matrix_b = numpy.array([[0, 1], [1, 0], [-1, -1]], dtype=numpy.int16)
        filename_b = os.path.join(self.workspace_dir, 'raster_b.tif')
        pygeoprocessing.numpy_array_to_raster(matrix_b, -1, (100, -100),
                                              origin, projection_wkt,
                                              filename_b)

        landcover_table_path = os.path.join(self.workspace_dir,
                                            'lulc_table.csv')
        with open(landcover_table_path, 'w') as lulc_csv:
            lulc_csv.write('code,lulc-class,is_coastal_blue_carbon_habitat\n')
            lulc_csv.write('0,mangrove,True\n')
            lulc_csv.write('1,parking lot,False\n')

        landcover_table = utils.build_lookup_from_csv(landcover_table_path,
                                                      'code')
        target_table_path = os.path.join(self.workspace_dir,
                                         'transition_table.csv')

        # Remove landcover code 1 from the table; expect error.
        del landcover_table[1]
        with self.assertRaises(ValueError) as context:
            preprocessor._create_transition_table(landcover_table,
                                                  [filename_a, filename_b],
                                                  target_table_path)

        self.assertIn('missing a row with the landuse code 1',
                      str(context.exception))

        # Re-load the landcover table
        landcover_table = utils.build_lookup_from_csv(landcover_table_path,
                                                      'code')
        preprocessor._create_transition_table(landcover_table,
                                              [filename_a, filename_b],
                                              target_table_path)

        with open(target_table_path) as transition_table:
            self.assertEqual(transition_table.readline(),
                             'lulc-class,mangrove,parking lot\n')
            self.assertEqual(transition_table.readline(),
                             'mangrove,accum,disturb\n')
            self.assertEqual(transition_table.readline(),
                             'parking lot,accum,NCC\n')

            # After the above lines is a blank line, then the legend.
            # Deliberately not testing the legend.
            self.assertEqual(transition_table.readline(), '\n')
Code Example #3
    def test_add_rasters(self):
        """CBC: Check that we can sum rasters."""
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(32731)  # WGS84 / UTM zone 31 S
        wkt = srs.ExportToWkt()

        raster_a_path = os.path.join(self.workspace_dir, 'a.tif')
        pygeoprocessing.numpy_array_to_raster(
            numpy.array([[5, 15, 12, 15]], dtype=numpy.uint8), 15, (2, -2),
            (2, -2), wkt, raster_a_path)

        raster_b_path = os.path.join(self.workspace_dir, 'b.tif')
        pygeoprocessing.numpy_array_to_raster(
            numpy.array([[3, 4, 5, 5]], dtype=numpy.uint8), 5, (2, -2),
            (2, -2), wkt, raster_b_path)

        target_path = os.path.join(self.workspace_dir, 'output.tif')
        coastal_blue_carbon._sum_n_rasters([raster_a_path, raster_b_path],
                                           target_path)

        nodata = coastal_blue_carbon.NODATA_FLOAT32_MIN
        try:
            raster = gdal.OpenEx(target_path)
            numpy.testing.assert_allclose(
                raster.ReadAsArray(),
                numpy.array([[8, 4, 12, nodata]], dtype=numpy.float32))
        finally:
            raster = None
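The expected values follow from each raster's nodata: a pixel contributes to the sum only where it is valid in that raster, and pixels valid in neither raster become nodata in the output. A standalone numpy sketch of how the expectation above is derived (an illustration, not the model's implementation):

nodata = coastal_blue_carbon.NODATA_FLOAT32_MIN  # same constant as the test above
a = numpy.array([[5, 15, 12, 15]], dtype=numpy.float32)  # raster a, nodata is 15
b = numpy.array([[3, 4, 5, 5]], dtype=numpy.float32)     # raster b, nodata is 5
valid_a = a != 15
valid_b = b != 5
expected = numpy.where(valid_a, a, 0) + numpy.where(valid_b, b, 0)
expected[~(valid_a | valid_b)] = nodata  # valid in neither raster
# expected is [[8., 4., 12., nodata]], matching the assertion above.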
Code Example #4
    def test_biophysical_table_missing_lucode(self):
        """SWY test bad biophysical table with missing LULC value."""
        from natcap.invest.seasonal_water_yield import seasonal_water_yield
        import pygeoprocessing

        # use predefined directory so test can clean up files during teardown
        args = SeasonalWaterYieldRegressionTests.generate_base_args(
            self.workspace_dir)
        # make args explicit that this is a base run of SWY
        args['user_defined_climate_zones'] = False
        args['user_defined_local_recharge'] = False
        args['monthly_alpha'] = False
        args['results_suffix'] = ''

        # add a LULC value not found in biophysical csv
        lulc_new_path = os.path.join(self.workspace_dir, 'lulc_new.tif')
        lulc_info = pygeoprocessing.get_raster_info(args['lulc_raster_path'])
        lulc_array = gdal.OpenEx(args['lulc_raster_path']).ReadAsArray()
        lulc_array[0][0] = 321
        # set a nodata value to make sure nodatas are handled correctly when
        # reclassifying
        lulc_array[0][1] = lulc_info['nodata'][0]
        pygeoprocessing.numpy_array_to_raster(
            lulc_array, lulc_info['nodata'][0], lulc_info['pixel_size'],
            (lulc_info['geotransform'][0], lulc_info['geotransform'][3]),
            lulc_info['projection_wkt'], lulc_new_path)

        lulc_array = None
        args['lulc_raster_path'] = lulc_new_path

        with self.assertRaises(ValueError) as context:
            seasonal_water_yield.execute(args)
        self.assertTrue(
            ("The missing values found in the LULC raster but not the"
             " table are: [321]") in str(context.exception))
Code Example #5
    def test_point_snapping_break_ties(self):
        """DelineateIt: distance ties are broken using flow accumulation."""
        from natcap.invest.delineateit import delineateit

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(32731)  # WGS84/UTM zone 31s
        wkt = srs.ExportToWkt()

        # need stream layer, points
        stream_matrix = numpy.array(
            [[0, 1, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0],
             [0, 1, 0, 0, 0, 0], [0, 1, 1, 1, 1, 1], [0, 1, 0, 0, 0, 0],
             [0, 1, 0, 0, 0, 0]],
            dtype=numpy.int8)
        stream_raster_path = os.path.join(self.workspace_dir, 'streams.tif')
        flow_accum_array = numpy.array(
            [[1, 5, 1, 1, 1, 1], [1, 5, 1, 1, 1, 1], [1, 5, 1, 1, 1, 1],
             [1, 5, 1, 1, 1, 1], [1, 5, 9, 9, 9, 9], [1, 4, 1, 1, 1, 1],
             [1, 4, 1, 1, 1, 1]],
            dtype=numpy.int8)
        flow_accum_path = os.path.join(self.workspace_dir, 'flow_accum.tif')
        pygeoprocessing.numpy_array_to_raster(stream_matrix, 255, (2, -2),
                                              (2, -2), wkt, stream_raster_path)
        pygeoprocessing.numpy_array_to_raster(flow_accum_array, -1, (2, -2),
                                              (2, -2), wkt, flow_accum_path)

        source_points_path = os.path.join(self.workspace_dir,
                                          'source_features.geojson')
        source_features = [Point(9, -7)]  # equidistant from two streams
        pygeoprocessing.shapely_geometry_to_vector(
            source_features,
            source_points_path,
            wkt,
            'GeoJSON',
            ogr_geom_type=ogr.wkbUnknown)

        snapped_points_path = os.path.join(self.workspace_dir,
                                           'snapped_points.gpkg')

        snap_distance = 10  # large enough to get multiple streams per point.
        delineateit.snap_points_to_nearest_stream(source_points_path,
                                                  stream_raster_path,
                                                  flow_accum_path,
                                                  snap_distance,
                                                  snapped_points_path)

        snapped_points_vector = gdal.OpenEx(snapped_points_path,
                                            gdal.OF_VECTOR)
        snapped_points_layer = snapped_points_vector.GetLayer()

        # should snap to stream point [4, 3] in the array above
        # if not considering flow accumulation, it would snap to the
        # nearest stream point found first in the array, at [2, 1]
        points = [
            shapely.wkb.loads(bytes(feature.GetGeometryRef().ExportToWkb()))
            for feature in snapped_points_layer
        ]
        self.assertEqual(len(points), 1)
        self.assertEqual((points[0].x, points[0].y), (9, -11))
Code Example #6
File: stormwater.py  Project: natcap/invest
def raster_average(raster_path, radius, kernel_path, out_path):
    """Average pixel values within a radius.

    Make a search kernel where a pixel has '1' if its centerpoint is within
    the radius of the center pixel's centerpoint.
    For each pixel in a raster, center the search kernel on top of it. Then
    its "neighborhood" includes all the pixels that are below a '1' in the
    search kernel. Add up the neighborhood pixel values and divide by how
    many there are.

    This accounts for edge pixels and nodata pixels. For instance, if the
    kernel covers a 3x3 pixel area centered on each pixel, most pixels will
    have 9 valid pixels in their neighborhood, most edge pixels will have 6,
    and most corner pixels will have 4. Edge and nodata pixels in the
    neighborhood don't count towards the total (denominator in the average).

    Args:
        raster_path (str): path to the raster file to average
        radius (float): distance to average around each pixel's centerpoint in
            raster coordinate system units
        kernel_path (str): path to write out the search kernel raster, an
            intermediate output required by pygeoprocessing.convolve_2d
        out_path (str): path to write out the averaged raster output

    Returns:
        None
    """
    search_kernel = make_search_kernel(raster_path, radius)

    srs = osr.SpatialReference()
    srs.ImportFromEPSG(3857)
    projection_wkt = srs.ExportToWkt()
    pygeoprocessing.numpy_array_to_raster(
        # float32 here to avoid pygeoprocessing bug issue #180
        search_kernel.astype(numpy.float32),
        FLOAT_NODATA,
        (20, -20),
        (0, 0),
        projection_wkt,
        kernel_path)

    # convolve the signal (input raster) with the kernel and normalize
    # this is equivalent to taking an average of each pixel's neighborhood
    pygeoprocessing.convolve_2d(
        (raster_path, 1),
        (kernel_path, 1),
        out_path,
        # pixels with nodata or off the edge of the raster won't count towards
        # the sum or the number of values to normalize by
        ignore_nodata_and_edges=True,
        # divide by number of valid pixels in the kernel (averaging)
        normalize_kernel=True,
        # output will have nodata where ratio_path has nodata
        mask_nodata=True,
        target_datatype=gdal.GDT_Float32,
        target_nodata=FLOAT_NODATA)
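The `make_search_kernel` helper is defined elsewhere in stormwater.py and is not shown here. A rough sketch of the circular kernel its docstring describes, purely as an illustration and not the module's actual implementation (the function name and details are assumptions):

def _search_kernel_sketch(raster_path, radius):
    # Convert the radius from raster units to pixels (assumes square pixels).
    pixel_size = abs(pygeoprocessing.get_raster_info(raster_path)['pixel_size'][0])
    pixel_radius = radius / pixel_size
    n = int(pixel_radius)
    # Distance of each pixel centerpoint from the center pixel's centerpoint.
    cols, rows = numpy.meshgrid(numpy.arange(-n, n + 1), numpy.arange(-n, n + 1))
    distances = numpy.hypot(cols, rows)
    # '1' where the centerpoint falls within the radius, '0' elsewhere.
    return (distances <= pixel_radius).astype(numpy.uint8)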
Code Example #7
    def test_track_first_disturbance(self):
        """CBC: Track disturbances over time."""
        float32_nodata = coastal_blue_carbon.NODATA_FLOAT32_MIN

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(32731)  # WGS84 / UTM zone 31 S
        wkt = srs.ExportToWkt()

        disturbance_magnitude_path = os.path.join(self.workspace_dir,
                                                  'disturbance_magnitude.tif')
        disturbance_magnitude_matrix = numpy.array(
            [[0.5, float32_nodata, 0.0]], dtype=numpy.float32)
        pygeoprocessing.numpy_array_to_raster(disturbance_magnitude_matrix,
                                              float32_nodata, (2, -2), (2, -2),
                                              wkt, disturbance_magnitude_path)

        stocks_path = os.path.join(self.workspace_dir, 'stocks.tif')
        stocks_matrix = numpy.array([[10, -1, 10]], dtype=numpy.float32)
        pygeoprocessing.numpy_array_to_raster(stocks_matrix, -1, (2, -2),
                                              (2, -2), wkt, stocks_path)

        current_year = 2010

        target_disturbance_path = os.path.join(self.workspace_dir,
                                               'disturbance_volume.tif')
        target_year_of_disturbance = os.path.join(self.workspace_dir,
                                                  'year_of_disturbance.tif')

        coastal_blue_carbon._track_disturbance(
            disturbance_magnitude_path,
            stocks_path,
            None,  # No prior disturbance volume
            None,  # No prior disturbance years
            current_year,
            target_disturbance_path,
            target_year_of_disturbance)

        try:
            expected_disturbance = numpy.array([[5.0, float32_nodata, 0.0]],
                                               dtype=numpy.float32)
            raster = gdal.OpenEx(target_disturbance_path)
            numpy.testing.assert_allclose(raster.ReadAsArray(),
                                          expected_disturbance)
        finally:
            raster = None

        try:
            uint16_nodata = coastal_blue_carbon.NODATA_UINT16_MAX
            expected_year_of_disturbance = numpy.array(
                [[2010, uint16_nodata, uint16_nodata]], dtype=numpy.uint16)
            raster = gdal.OpenEx(target_year_of_disturbance)
            numpy.testing.assert_allclose(raster.ReadAsArray(),
                                          expected_year_of_disturbance)
        finally:
            raster = None
Code Example #8
def make_kernel_raster(pixel_radius, target_path):
    """Create a Gaussian kernel raster with the given pixel radius at `target_path`."""
    truncate = 4
    size = int(pixel_radius * 2 * truncate + 1)
    step_fn = numpy.zeros((size, size))
    step_fn[size//2, size//2] = 1
    kernel_array = scipy.ndimage.filters.gaussian_filter(
        step_fn, pixel_radius, order=0, mode='reflect', cval=0.0,
        truncate=truncate)
    pygeoprocessing.numpy_array_to_raster(
        kernel_array, -1, (1., -1.), (0.,  0.), None,
        target_path)
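A minimal usage sketch pairing such a kernel with pygeoprocessing.convolve_2d (the workspace and raster paths here are hypothetical):

kernel_path = os.path.join(workspace_dir, 'gaussian_kernel.tif')
make_kernel_raster(5, kernel_path)  # Gaussian kernel with a 5-pixel radius
pygeoprocessing.convolve_2d(
    (base_raster_path, 1), (kernel_path, 1), smoothed_raster_path,
    ignore_nodata_and_edges=True, normalize_kernel=True)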
Code Example #9
    def test_local_recharge_undefined_nodata(self):
        """Test `calculate_local_recharge` with undefined nodata values"""
        from natcap.invest.seasonal_water_yield import seasonal_water_yield_core

        # set up tiny raster arrays to test
        precip_array = numpy.array([[10, 10], [10, 10]], dtype=numpy.float32)
        et0_array = numpy.array([[100, 100], [200, 200]], dtype=numpy.float32)
        quickflow_array = numpy.array(
            [[-4.8e-36, -4.822e-36], [6.1e-01, 6.1e-01]], dtype=numpy.float32)
        flow_dir_array = numpy.array([[15, 25], [50, 50]], dtype=numpy.float32)
        kc_array = numpy.array([[1, 1], [1, 1]], dtype=numpy.float32)
        stream_mask = numpy.array([[0, 0], [0, 0]], dtype=numpy.float32)

        precip_path = os.path.join(self.workspace_dir, 'precip.tif')
        et0_path = os.path.join(self.workspace_dir, 'et0.tif')
        quickflow_path = os.path.join(self.workspace_dir, 'quickflow.tif')
        flow_dir_path = os.path.join(self.workspace_dir, 'flow_dir.tif')
        kc_path = os.path.join(self.workspace_dir, 'kc.tif')
        stream_path = os.path.join(self.workspace_dir, 'stream.tif')

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        project_wkt = srs.ExportToWkt()
        output_path = os.path.join(self.workspace_dir, 'quickflow.tif')

        # write all the test arrays to raster files
        for array, path in [(precip_array, precip_path),
                            (et0_array, et0_path)]:
            # make the nodata value undefined for user inputs
            pygeoprocessing.numpy_array_to_raster(array, None, (1, -1),
                                                  (1180000, 690000),
                                                  project_wkt, path)
        for array, path in [(quickflow_array, quickflow_path),
                            (flow_dir_array, flow_dir_path),
                            (kc_array, kc_path), (stream_mask, stream_path)]:
            # define a nodata value for intermediate outputs
            pygeoprocessing.numpy_array_to_raster(array, -1, (1, -1),
                                                  (1180000, 690000),
                                                  project_wkt, path)

        # arbitrary values for alpha, beta, gamma, etc.
        # not verifying the output, just making sure there are no errors
        seasonal_water_yield_core.calculate_local_recharge(
            [precip_path for i in range(12)],
            [et0_path for i in range(12)],
            [quickflow_path for i in range(12)],
            flow_dir_path,
            [kc_path for i in range(12)],
            {i: 0.5 for i in range(12)},
            0.5, 0.5, stream_path,
            os.path.join(self.workspace_dir, 'target_li_path.tif'),
            os.path.join(self.workspace_dir, 'target_li_avail_path.tif'),
            os.path.join(self.workspace_dir, 'target_l_sum_avail_path.tif'),
            os.path.join(self.workspace_dir, 'target_aet_path.tif'))
Code Example #10
    def test_monthly_quickflow_undefined_nodata(self):
        """Test `_calculate_monthly_quick_flow` with undefined nodata values"""
        from natcap.invest.seasonal_water_yield import seasonal_water_yield

        # set up tiny raster arrays to test
        precip_array = numpy.array([[10, 10], [10, 10]], dtype=numpy.float32)
        lulc_array = numpy.array([[1, 1], [2, 2]], dtype=numpy.float32)
        cn_array = numpy.array([[40, 40], [80, 80]], dtype=numpy.float32)
        si_array = numpy.array([[15, 15], [2.5, 2.5]], dtype=numpy.float32)
        n_events_array = numpy.array([[10, 10], [1, 1]], dtype=numpy.float32)
        stream_mask = numpy.array([[0, 0], [0, 0]], dtype=numpy.float32)

        expected_quickflow_array = numpy.array(
            [[-4.82284552e-36, -4.82284552e-36],
             [6.19275831e-01, 6.19275831e-01]])

        precip_path = os.path.join(self.workspace_dir, 'precip.tif')
        lulc_path = os.path.join(self.workspace_dir, 'lulc.tif')
        cn_path = os.path.join(self.workspace_dir, 'cn.tif')
        si_path = os.path.join(self.workspace_dir, 'si.tif')
        n_events_path = os.path.join(self.workspace_dir, 'n_events.tif')
        stream_path = os.path.join(self.workspace_dir, 'stream.tif')

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        project_wkt = srs.ExportToWkt()
        output_path = os.path.join(self.workspace_dir, 'quickflow.tif')

        # write all the test arrays to raster files
        for array, path in [(precip_array, precip_path),
                            (lulc_array, lulc_path),
                            (n_events_array, n_events_path)]:
            # make the nodata value undefined for user inputs
            pygeoprocessing.numpy_array_to_raster(array, None, (1, -1),
                                                  (1180000, 690000),
                                                  project_wkt, path)
        for array, path in [(cn_array, cn_path), (si_array, si_path),
                            (stream_mask, stream_path)]:
            # define a nodata value for intermediate outputs
            pygeoprocessing.numpy_array_to_raster(array, -1, (1, -1),
                                                  (1180000, 690000),
                                                  project_wkt, path)

        # save the quickflow results raster to quickflow.tif
        seasonal_water_yield._calculate_monthly_quick_flow(
            precip_path, lulc_path, cn_path, n_events_path, stream_path,
            si_path, output_path)
        # read the raster output back in to a numpy array
        quickflow_array = pygeoprocessing.raster_to_numpy_array(output_path)
        # assert each element is close to the expected value
        self.assertTrue(
            numpy.isclose(quickflow_array, expected_quickflow_array).all())
Code Example #11
File: test_wind_energy.py  Project: richpsharp/invest
    def test_calculate_distances_land_grid(self):
        """WindEnergy: testing 'calculate_distances_land_grid' function."""
        from natcap.invest import wind_energy

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(3157)
        projection_wkt = srs.ExportToWkt()
        origin = (443723.127327877911739, 4956546.905980412848294)
        pos_x = origin[0]
        pos_y = origin[1]

        # Setup parameters for creating point shapefile
        fields = {'id': ogr.OFTReal, 'L2G': ogr.OFTReal}
        attrs = [{'id': 1, 'L2G': 10}, {'id': 2, 'L2G': 20}]

        geometries = [
            Point(pos_x + 50, pos_y - 50),
            Point(pos_x + 50, pos_y - 150)
        ]
        land_shape_path = os.path.join(self.workspace_dir, 'temp_shape.shp')
        # Create point shapefile to use for testing input
        pygeoprocessing.shapely_geometry_to_vector(geometries,
                                                   land_shape_path,
                                                   projection_wkt,
                                                   'ESRI Shapefile',
                                                   fields=fields,
                                                   attribute_list=attrs,
                                                   ogr_geom_type=ogr.wkbPoint)

        # Setup parameters for create raster
        matrix = numpy.array([[1, 1, 1, 1], [1, 1, 1, 1]], dtype=numpy.int32)
        harvested_masked_path = os.path.join(self.workspace_dir,
                                             'temp_raster.tif')
        # Create raster to use for testing input
        pygeoprocessing.numpy_array_to_raster(matrix, -1, (100, -100), origin,
                                              projection_wkt,
                                              harvested_masked_path)

        tmp_dist_final_path = os.path.join(self.workspace_dir,
                                           'dist_final.tif')
        # Call function to test given testing inputs
        wind_energy._calculate_distances_land_grid(land_shape_path,
                                                   harvested_masked_path,
                                                   tmp_dist_final_path, '')

        # Compare the results
        res_array = pygeoprocessing.raster_to_numpy_array(tmp_dist_final_path)
        exp_array = numpy.array([[10, 110, 210, 310], [20, 120, 220, 320]],
                                dtype=numpy.int32)
        numpy.testing.assert_allclose(res_array, exp_array)
Code Example #12
File: test_wind_energy.py  Project: natcap/invest
    def test_create_distance_raster(self):
        """WindEnergy: testing '_create_distance_raster' function."""
        from natcap.invest import wind_energy

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(3157)  # UTM Zone 10N
        projection_wkt = srs.ExportToWkt()
        origin = (443723.127327877911739, 4956546.905980412848294)
        pos_x = origin[0]
        pos_y = origin[1]

        # Setup and create vector to pass to function
        fields = {'id': ogr.OFTReal}
        attrs = [{'id': 1}]

        # Square polygon that will overlap the 4 pixels of the raster in the
        # upper left corner
        poly_geometry = [box(pos_x, pos_y - 17, pos_x + 17, pos_y)]
        poly_vector_path = os.path.join(self.workspace_dir,
                                        'distance_from_vector.gpkg')
        # Create polygon shapefile to use as testing input
        pygeoprocessing.shapely_geometry_to_vector(
            poly_geometry,
            poly_vector_path,
            projection_wkt,
            'GPKG',
            fields=fields,
            attribute_list=attrs,
            ogr_geom_type=ogr.wkbPolygon)

        # Create 2x5 raster
        matrix = numpy.array([[1, 1, 1, 1, 1], [1, 1, 1, 1, 1]],
                             dtype=numpy.float32)
        base_raster_path = os.path.join(self.workspace_dir, 'temp_raster.tif')
        # Create raster to use for testing input
        pygeoprocessing.numpy_array_to_raster(matrix, -1, (10, -10), origin,
                                              projection_wkt, base_raster_path)

        dist_raster_path = os.path.join(self.workspace_dir, 'dist.tif')
        # Call function to test given testing inputs
        wind_energy._create_distance_raster(base_raster_path, poly_vector_path,
                                            dist_raster_path,
                                            self.workspace_dir)

        # Compare the results
        res_array = pygeoprocessing.raster_to_numpy_array(dist_raster_path)
        exp_array = numpy.array([[0, 0, 10, 20, 30], [0, 0, 10, 20, 30]],
                                dtype=numpy.float32)
        numpy.testing.assert_allclose(res_array, exp_array)
Code Example #13
    def test_pixel_size_based_on_coordinate_transform(self):
        """WaveEnergy: test '_pixel_size_based_on_coordinate_transform' fn."""
        from natcap.invest import wave_energy

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(3157)

        # Define a Lat/Long WGS84 projection
        epsg_id = 4326
        reference = osr.SpatialReference()
        reference.ImportFromEPSG(epsg_id)
        # Get projection as WKT
        latlong_proj = reference.ExportToWkt()
        # Set origin to use for setting up geometries / geotransforms
        latlong_origin = (-70.5, 42.5)

        # Get a point from the clipped data object to use later in helping
        # determine proper pixel size
        matrix = numpy.array([[1, 1, 1, 1], [1, 1, 1, 1]], dtype=numpy.int32)
        raster_path = os.path.join(self.workspace_dir, 'input_raster.tif')
        # Create raster to use as testing input
        pygeoprocessing.numpy_array_to_raster(matrix, -1.0,
                                              (0.033333, -0.033333),
                                              latlong_origin, latlong_proj,
                                              raster_path)

        raster_gt = pygeoprocessing.geoprocessing.get_raster_info(
            raster_path)['geotransform']
        point = (raster_gt[0], raster_gt[3])
        raster_wkt = latlong_proj

        # Create a Spatial Reference from the raster's WKT
        raster_sr = osr.SpatialReference()
        raster_sr.ImportFromWkt(raster_wkt)

        # A coordinate transformation to help get the proper pixel size of
        # the reprojected raster
        coord_trans = utils.create_coordinate_transformer(raster_sr, srs)
        # Call the function to test
        result = wave_energy._pixel_size_based_on_coordinate_transform(
            raster_path, coord_trans, point)

        expected_res = (5553.933063, -1187.370813)

        # Compare
        for res, exp in zip(result, expected_res):
            self.assertAlmostEqual(res, exp, places=5)
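For background, the pixel size under a coordinate transformation can be approximated by transforming the raster's top-left corner and the corner one pixel over and down, then differencing the transformed coordinates. A rough sketch of that idea (an illustration only, not the wave_energy function's actual implementation; the function name is hypothetical):

def _pixel_size_after_transform_sketch(geotransform, coord_trans):
    # Top-left corner of the raster, in the transformed coordinate system.
    x0, y0, _ = coord_trans.TransformPoint(geotransform[0], geotransform[3])
    # The corner one pixel to the right and one pixel down, also transformed.
    x1, y1, _ = coord_trans.TransformPoint(geotransform[0] + geotransform[1],
                                           geotransform[3] + geotransform[5])
    return (x1 - x0, y1 - y0)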
Code Example #14
def make_raster_from_array(base_array, base_raster_path):
    """Make a raster from an array on a designated path.

    Args:
        base_array (numpy.ndarray): the 2D array for making the raster.
        base_raster_path (str): path to the raster to be created.

    Returns:
        None.

    """
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(26910)  # UTM Zone 10N
    project_wkt = srs.ExportToWkt()

    # Each pixel is 1x1 m
    pygeoprocessing.numpy_array_to_raster(base_array, -1, (1, -1),
                                          (1180000, 690000), project_wkt,
                                          base_raster_path)
Code Example #15
File: test_delineateit.py  Project: dkav/invest
    def test_find_pour_points_by_block(self):
        """DelineateIt: test pour point detection against block edges."""
        from natcap.invest.delineateit import delineateit

        a = 100  # nodata value
        flow_dir_array = numpy.array(
            [[0, 0, 0, 0, 7, 7, 7, 1, 6, 6], [2, 3, 4, 5, 6, 7, 0, 1, 1, 2],
             [2, 2, 2, 2, 0, a, a, 3, 3, a], [2, 1, 1, 1, 2, 6, 4, 1, a, a],
             [1, 1, 0, 0, 0, 0, a, a, a, a]],
            dtype=numpy.int8)

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(3157)
        projection_wkt = srs.ExportToWkt()

        raster_path = os.path.join(self.workspace_dir, 'small_raster.tif')
        pygeoprocessing.numpy_array_to_raster(flow_dir_array, a, (1, 1),
                                              (0, 0), projection_wkt,
                                              raster_path)

        expected_pour_points = {(7.5, 0.5), (5.5, 1.5), (4.5, 2.5), (5.5, 4.5)}

        # Mock iterblocks so that we can test with an array smaller than 128x128
        # to make sure that the algorithm gets pour points on block edges e.g.
        # flow_dir_array[2, 4]
        def mock_iterblocks(*args, **kwargs):
            xoffs = [0, 4, 8]
            win_xsizes = [4, 4, 2]
            for xoff, win_xsize in zip(xoffs, win_xsizes):
                yield {
                    'xoff': xoff,
                    'yoff': 0,
                    'win_xsize': win_xsize,
                    'win_ysize': 5
                }

        with mock.patch(
                'natcap.invest.delineateit.delineateit.pygeoprocessing.iterblocks',
                mock_iterblocks):
            pour_points = delineateit._find_raster_pour_points(
                (raster_path, 1))
            self.assertEqual(pour_points, expected_pour_points)
Code Example #16
File: test_hra.py  Project: natcap/invest
def _make_raster_from_array(base_array, target_raster_path, projected=True):
    """Make a raster from an array on a designated path.

    Args:
        base_array (numpy.ndarray): the 2D array for making the raster.

        target_raster_path (str): path to the output raster.

        projected (bool): if true, define projection information for the raster
            based on an EPSG code.

    Returns:
        None.

    """
    srs = osr.SpatialReference()
    if projected:
        srs.ImportFromEPSG(EPSG_CODE)  # UTM Zone 10N, unit = meter
    project_wkt = srs.ExportToWkt()

    pygeoprocessing.numpy_array_to_raster(
        base_array, -1, (1, -1), ORIGIN, project_wkt, target_raster_path)
Code Example #17
File: test_sdr.py  Project: natcap/invest
    def test_what_drains_to_stream(self):
        """SDR test for what pixels drain to a stream."""
        from natcap.invest.sdr import sdr

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # NAD83 / UTM zone 10N
        srs_wkt = srs.ExportToWkt()
        origin = (463250, 4929700)
        pixel_size = (30, -30)

        flow_dir_mfd = numpy.array([[0, 1], [1, 1]], dtype=numpy.float64)
        flow_dir_mfd_nodata = 0  # Matches pygeoprocessing output
        flow_dir_mfd_path = os.path.join(self.workspace_dir, 'flow_dir.tif')
        pygeoprocessing.numpy_array_to_raster(flow_dir_mfd,
                                              flow_dir_mfd_nodata, pixel_size,
                                              origin, srs_wkt,
                                              flow_dir_mfd_path)

        dist_to_channel = numpy.array([[10, 5], [-1, 6]], dtype=numpy.float64)
        dist_to_channel_nodata = -1  # Matches pygeoprocessing output
        dist_to_channel_path = os.path.join(self.workspace_dir,
                                            'dist_to_channel.tif')
        pygeoprocessing.numpy_array_to_raster(dist_to_channel,
                                              dist_to_channel_nodata,
                                              pixel_size, origin, srs_wkt,
                                              dist_to_channel_path)

        target_what_drains_path = os.path.join(self.workspace_dir,
                                               'what_drains.tif')
        sdr._calculate_what_drains_to_stream(flow_dir_mfd_path,
                                             dist_to_channel_path,
                                             target_what_drains_path)

        # 255 is the byte nodata value assigned
        expected_drainage = numpy.array([[255, 1], [0, 1]], dtype=numpy.uint8)
        what_drains = pygeoprocessing.raster_to_numpy_array(
            target_what_drains_path)
        numpy.testing.assert_allclose(what_drains, expected_drainage)
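The expected array can be read directly from the two inputs: the pixel whose flow direction is nodata (0) becomes 255 (undefined), the pixel whose distance-to-channel is nodata (-1) becomes 0 (does not drain), and the rest become 1 (drains). A standalone numpy sketch of that mapping, shown only to explain the expectation and not as the SDR implementation:

flow_dir = numpy.array([[0, 1], [1, 1]])  # 0 is nodata
dist = numpy.array([[10, 5], [-1, 6]])    # -1 is nodata
drains = numpy.full(flow_dir.shape, 255, dtype=numpy.uint8)  # 255 = undefined
valid_flow = flow_dir != 0
drains[valid_flow] = (dist[valid_flow] != -1).astype(numpy.uint8)
# drains is [[255, 1], [0, 1]], matching expected_drainage above.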
Code Example #18
    def test_count_pixels_groups(self):
        """WaveEnergy: testing '_count_pixels_groups' function."""
        from natcap.invest import wave_energy

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(3157)
        projection_wkt = srs.ExportToWkt()
        origin = (443723.127327877911739, 4956546.905980412848294)

        group_values = [1, 3, 5, 7]
        matrix = numpy.array([[1, 3, 5, 9], [3, 7, 1, 5], [2, 4, 5, 7]],
                             dtype=numpy.int32)

        raster_path = os.path.join(self.workspace_dir, 'pixel_groups.tif')
        # Create raster to use for testing input
        pygeoprocessing.numpy_array_to_raster(matrix, -1, (100, -100), origin,
                                              projection_wkt, raster_path)

        results = wave_energy._count_pixels_groups(raster_path, group_values)

        expected_results = [2, 2, 3, 2]

        for res, exp_res in zip(results, expected_results):
            self.assertAlmostEqual(res, exp_res, places=6)
Code Example #19
File: test_carbon.py  Project: richpsharp/invest
    def test_carbon_totals_precision(self):
        """Carbon: check float64 precision in pixel value summation."""
        from natcap.invest import carbon

        big_float32_array = numpy.random.default_rng(seed=1).random(
            (1000, 1000), dtype=numpy.float32)

        # Throw in some nodata values for good measure.
        nodata = numpy.finfo(numpy.float32).min
        big_float32_array[1:15] = nodata

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(32731)  # WGS84/UTM zone 31s
        wkt = srs.ExportToWkt()
        raster_path = os.path.join(self.workspace_dir, 'raster.tif')
        pygeoprocessing.numpy_array_to_raster(big_float32_array, float(nodata),
                                              (2, -2), (2, -2), wkt,
                                              raster_path)

        # Verify better-than-float32 precision on raster summation.
        # Using a numpy float32 in numpy.sum will pass up to rtol=1e-9.
        numpy.testing.assert_allclose(carbon._accumulate_totals(raster_path),
                                      492919.73994,
                                      rtol=1e-12)  # Note better precision
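The precision point is that accumulating roughly a million float32 values into a float32 total keeps only about 7 significant digits, while a float64 accumulator keeps many more. A standalone numpy illustration of the difference the assertion is sensitive to:

values = numpy.random.default_rng(seed=1).random((1000, 1000), dtype=numpy.float32)
sum32 = values.sum(dtype=numpy.float32)  # accumulate in float32
sum64 = values.sum(dtype=numpy.float64)  # accumulate in float64
# Per the comment above, the float32 accumulation only agrees with the float64
# total to roughly rtol=1e-9, so asserting at rtol=1e-12 confirms that
# _accumulate_totals sums in float64.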
Code Example #20
File: test_delineateit.py  Project: dkav/invest
    def test_check_geometries(self):
        """DelineateIt: Check that we can reasonably repair geometries."""
        from natcap.invest.delineateit import delineateit
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(32731)  # WGS84/UTM zone 31s
        projection_wkt = srs.ExportToWkt()

        dem_matrix = numpy.array(
            [[0, 1, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0],
             [0, 1, 0, 0, 0, 0], [0, 1, 1, 1, 1, 1], [0, 1, 0, 0, 0, 0],
             [0, 1, 0, 0, 0, 0]],
            dtype=numpy.int8)
        dem_raster_path = os.path.join(self.workspace_dir, 'dem.tif')
        # byte datatype
        pygeoprocessing.numpy_array_to_raster(dem_matrix, 255, (2, -2),
                                              (2, -2), projection_wkt,
                                              dem_raster_path)

        # empty geometry
        invalid_geometry = ogr.CreateGeometryFromWkt('POLYGON EMPTY')
        self.assertTrue(invalid_geometry.IsEmpty())

        # point outside of the DEM bbox
        invalid_point = ogr.CreateGeometryFromWkt('POINT (-100 -100)')

        # line intersects the DEM but is not contained by it
        valid_line = ogr.CreateGeometryFromWkt(
            'LINESTRING (-100 100, 100 -100)')

        # invalid polygon could be fixed by buffering by 0
        invalid_bowtie_polygon = ogr.CreateGeometryFromWkt(
            'POLYGON ((2 -2, 6 -2, 2 -6, 6 -6, 2 -2))')
        self.assertFalse(invalid_bowtie_polygon.IsValid())

        # Bowtie polygon with vertex in the middle, could be fixed
        # by buffering by 0
        invalid_alt_bowtie_polygon = ogr.CreateGeometryFromWkt(
            'POLYGON ((2 -2, 6 -2, 4 -4, 6 -6, 2 -6, 4 -4, 2 -2))')
        self.assertFalse(invalid_alt_bowtie_polygon.IsValid())

        # invalid polygon could be fixed by closing rings
        invalid_open_ring_polygon = ogr.CreateGeometryFromWkt(
            'POLYGON ((2 -2, 6 -2, 6 -6, 2 -6))')
        self.assertFalse(invalid_open_ring_polygon.IsValid())

        gpkg_driver = gdal.GetDriverByName('GPKG')
        outflow_vector_path = os.path.join(self.workspace_dir, 'vector.gpkg')
        outflow_vector = gpkg_driver.Create(outflow_vector_path, 0, 0, 0,
                                            gdal.GDT_Unknown)
        outflow_layer = outflow_vector.CreateLayer('outflow_layer', srs,
                                                   ogr.wkbUnknown)
        outflow_layer.CreateField(ogr.FieldDefn('geom_id', ogr.OFTInteger))

        outflow_layer.StartTransaction()
        for index, geometry in enumerate(
            (invalid_geometry, invalid_point, valid_line,
             invalid_bowtie_polygon, invalid_alt_bowtie_polygon,
             invalid_open_ring_polygon)):
            if geometry is None:
                self.fail('Geometry could not be created')

            outflow_feature = ogr.Feature(outflow_layer.GetLayerDefn())
            outflow_feature.SetField('geom_id', index)
            outflow_feature.SetGeometry(geometry)
            outflow_layer.CreateFeature(outflow_feature)
        outflow_layer.CommitTransaction()

        self.assertEqual(outflow_layer.GetFeatureCount(), 6)
        outflow_layer = None
        outflow_vector = None

        target_vector_path = os.path.join(self.workspace_dir,
                                          'checked_geometries.gpkg')
        with self.assertRaises(ValueError) as cm:
            delineateit.check_geometries(outflow_vector_path,
                                         dem_raster_path,
                                         target_vector_path,
                                         skip_invalid_geometry=False)
        self.assertTrue('is invalid' in str(cm.exception))

        delineateit.check_geometries(outflow_vector_path,
                                     dem_raster_path,
                                     target_vector_path,
                                     skip_invalid_geometry=True)

        # I only expect to see 1 feature in the output layer, as there's only 1
        # valid geometry.
        expected_geom_areas = {
            2: 0,
        }

        target_vector = gdal.OpenEx(target_vector_path, gdal.OF_VECTOR)
        target_layer = target_vector.GetLayer()
        self.assertEqual(target_layer.GetFeatureCount(),
                         len(expected_geom_areas))

        for feature in target_layer:
            geom = feature.GetGeometryRef()
            self.assertAlmostEqual(
                geom.Area(), expected_geom_areas[feature.GetField('geom_id')])

        target_layer = None
        target_vector = None
Code Example #21
File: test_delineateit.py  Project: dkav/invest
    def test_point_snapping_multipoint(self):
        """DelineateIt: test multi-point snapping."""
        from natcap.invest.delineateit import delineateit

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(32731)  # WGS84/UTM zone 31s
        wkt = srs.ExportToWkt()

        # need stream layer, points
        stream_matrix = numpy.array(
            [[0, 1, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0],
             [0, 1, 0, 0, 0, 0], [0, 1, 1, 1, 1, 1], [0, 1, 0, 0, 0, 0],
             [0, 1, 0, 0, 0, 0]],
            dtype=numpy.int8)
        stream_raster_path = os.path.join(self.workspace_dir, 'streams.tif')
        # byte datatype
        pygeoprocessing.numpy_array_to_raster(stream_matrix, 255, (2, -2),
                                              (2, -2), wkt, stream_raster_path)

        source_points_path = os.path.join(self.workspace_dir,
                                          'source_features.gpkg')
        gpkg_driver = gdal.GetDriverByName('GPKG')
        points_vector = gpkg_driver.Create(source_points_path, 0, 0, 0,
                                           gdal.GDT_Unknown)
        layer_name = os.path.splitext(os.path.basename(source_points_path))[0]
        points_layer = points_vector.CreateLayer(layer_name,
                                                 points_vector.GetSpatialRef(),
                                                 ogr.wkbUnknown)
        # Create a bunch of points for the various OGR multipoint types and
        # make sure that they are all snapped to exactly the same place.
        points_layer.StartTransaction()
        for multipoint_type in (ogr.wkbMultiPoint, ogr.wkbMultiPointM,
                                ogr.wkbMultiPointZM, ogr.wkbMultiPoint25D):
            new_feature = ogr.Feature(points_layer.GetLayerDefn())
            new_geom = ogr.Geometry(multipoint_type)
            component_point = ogr.Geometry(ogr.wkbPoint)
            component_point.AddPoint(3, -5)
            new_geom.AddGeometry(component_point)
            new_feature.SetGeometry(new_geom)
            points_layer.CreateFeature(new_feature)

        # Verify point snapping will run if we give it empty multipoints.
        for point_type in (ogr.wkbPoint, ogr.wkbMultiPoint):
            new_feature = ogr.Feature(points_layer.GetLayerDefn())
            new_geom = ogr.Geometry(point_type)
            new_feature.SetGeometry(new_geom)
            points_layer.CreateFeature(new_feature)

        points_layer.CommitTransaction()

        snapped_points_path = os.path.join(self.workspace_dir,
                                           'snapped_points.gpkg')
        snap_distance = 10  # large enough to get multiple streams per point.
        delineateit.snap_points_to_nearest_stream(source_points_path,
                                                  (stream_raster_path, 1),
                                                  snap_distance,
                                                  snapped_points_path)

        try:
            snapped_points_vector = gdal.OpenEx(snapped_points_path,
                                                gdal.OF_VECTOR)
            snapped_points_layer = snapped_points_vector.GetLayer()

            # All 4 multipoints should have been snapped to the same place and
            # should all be Point geometries.
            self.assertEqual(4, snapped_points_layer.GetFeatureCount())
            expected_feature = shapely.geometry.Point(5, -5)
            for feature in snapped_points_layer:
                shapely_feature = shapely.wkb.loads(
                    bytes(feature.GetGeometryRef().ExportToWkb()))
                self.assertTrue(shapely_feature.equals(expected_feature))
        finally:
            snapped_points_layer = None
            snapped_points_vector = None
Code Example #22
File: test_delineateit.py  Project: dkav/invest
    def test_point_snapping(self):
        """DelineateIt: test point snapping."""
        from natcap.invest.delineateit import delineateit

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(32731)  # WGS84/UTM zone 31s
        wkt = srs.ExportToWkt()

        # need stream layer, points
        stream_matrix = numpy.array(
            [[0, 1, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0],
             [0, 1, 0, 0, 0, 0], [0, 1, 1, 1, 1, 1], [0, 1, 0, 0, 0, 0],
             [0, 1, 0, 0, 0, 0]],
            dtype=numpy.int8)
        stream_raster_path = os.path.join(self.workspace_dir, 'streams.tif')
        # byte datatype
        pygeoprocessing.numpy_array_to_raster(stream_matrix, 255, (2, -2),
                                              (2, -2), wkt, stream_raster_path)

        source_points_path = os.path.join(self.workspace_dir,
                                          'source_features.geojson')
        source_features = [
            Point(-1, -1),  # off the edge of the stream raster.
            Point(3, -5),
            Point(7, -9),
            Point(13, -5),
            MultiPoint([(13, -5)]),
            box(-2, -2, -1, -1),  # Off the edge
        ]
        fields = {'foo': ogr.OFTInteger, 'bar': ogr.OFTString}
        attributes = [
            {'foo': 0, 'bar': '0.1'},
            {'foo': 1, 'bar': '1.1'},
            {'foo': 2, 'bar': '2.1'},
            {'foo': 3, 'bar': '3.1'},
            {'foo': 3, 'bar': '3.1'},  # intentional duplicate fields
            {'foo': 4, 'bar': '4.1'},
        ]
        pygeoprocessing.shapely_geometry_to_vector(
            source_features,
            source_points_path,
            wkt,
            'GeoJSON',
            fields=fields,
            attribute_list=attributes,
            ogr_geom_type=ogr.wkbUnknown)

        snapped_points_path = os.path.join(self.workspace_dir,
                                           'snapped_points.gpkg')

        snap_distance = -1
        with self.assertRaises(ValueError) as cm:
            delineateit.snap_points_to_nearest_stream(source_points_path,
                                                      (stream_raster_path, 1),
                                                      snap_distance,
                                                      snapped_points_path)
        self.assertTrue('must be >= 0' in str(cm.exception))

        snap_distance = 10  # large enough to get multiple streams per point.
        delineateit.snap_points_to_nearest_stream(source_points_path,
                                                  (stream_raster_path, 1),
                                                  snap_distance,
                                                  snapped_points_path)

        snapped_points_vector = gdal.OpenEx(snapped_points_path,
                                            gdal.OF_VECTOR)
        snapped_points_layer = snapped_points_vector.GetLayer()

        # snapped layer will include 4 valid points and 1 polygon.
        self.assertEqual(5, snapped_points_layer.GetFeatureCount())

        expected_geometries_and_fields = [
            (Point(5, -5), {'foo': 1, 'bar': '1.1'}),
            (Point(5, -9), {'foo': 2, 'bar': '2.1'}),
            (Point(13, -11), {'foo': 3, 'bar': '3.1'}),
            (Point(13, -11), {'foo': 3, 'bar': '3.1'}),  # Multipoint now point
            (box(-2, -2, -1, -1), {'foo': 4, 'bar': '4.1'}),  # unchanged
        ]
        for feature, (expected_geom,
                      expected_fields) in zip(snapped_points_layer,
                                              expected_geometries_and_fields):
            shapely_feature = shapely.wkb.loads(
                bytes(feature.GetGeometryRef().ExportToWkb()))

            self.assertTrue(shapely_feature.equals(expected_geom))
            self.assertEqual(expected_fields, feature.items())
Code Example #23
def fill_by_convolution(base_raster_path, convolve_radius,
                        target_filled_raster_path):
    """Clip and fill.

    Clip the base raster data to the bounding box, then fill any nodata
    holes with a weighted distance convolution.

    Args:
        base_raster_path (str): path to base raster
        convolve_radius (float): maximum convolution kernel distance in
            projected units of the base raster.
        target_filled_raster_path (str): raster created by the convolution
            fill; if holes are too far from valid pixels the resulting fill
            will be nonsensical, perhaps NaN.

    Returns:
        None
    """
    try:
        LOGGER.info(f'filling {base_raster_path}')
        # create a working directory alongside the target, named after the
        # target file, so it can't be duplicated and is easier to spot when
        # debugging
        working_dir = os.path.join(
            os.path.dirname(target_filled_raster_path),
            os.path.basename(os.path.splitext(target_filled_raster_path)[0]))
        try:
            os.makedirs(working_dir)
        except OSError:
            pass

        basename = os.path.basename(target_filled_raster_path)
        base_raster_info = pygeoprocessing.get_raster_info(base_raster_path)

        # this ensures a minimum of 3 pixels in case the pixel size is too
        # chunky
        n = max(3, int(convolve_radius / base_raster_info['pixel_size'][0]))
        base = numpy.zeros((n, n))
        base[n // 2, n // 2] = 1
        kernel_array = scipy.ndimage.filters.gaussian_filter(base, n / 3)
        kernel_raster_path = os.path.join(working_dir, f'kernel_{basename}')
        geotransform = base_raster_info['geotransform']
        pygeoprocessing.numpy_array_to_raster(
            kernel_array, None, base_raster_info['pixel_size'],
            (geotransform[0], geotransform[3]),
            base_raster_info['projection_wkt'], kernel_raster_path)

        # scrub input raster
        sanitized_base_raster_path = os.path.join(working_dir,
                                                  f'sanitized_{basename}')
        sanitize_raster(base_raster_path, sanitized_base_raster_path)

        # mask valid (use a distinct filename so the sanitized raster above is
        # not overwritten)
        valid_raster_path = os.path.join(working_dir, f'valid_{basename}')
        pygeoprocessing.raster_calculator(
            [(base_raster_path, 1), (base_raster_info['nodata'][0], 'raw')],
            _mask_valid_op, valid_raster_path, gdal.GDT_Byte, None)
        mask_kernel_raster_path = os.path.join(working_dir,
                                               f'mask_kernel_{basename}')
        geotransform = base_raster_info['geotransform']
        mask_kernel_array = numpy.copy(kernel_array)
        mask_kernel_array[:] = 1
        pygeoprocessing.numpy_array_to_raster(
            mask_kernel_array, None, base_raster_info['pixel_size'],
            (geotransform[0], geotransform[3]),
            base_raster_info['projection_wkt'], mask_kernel_raster_path)
        coverage_raster_path = os.path.join(working_dir,
                                            f'coverage_{basename}')
        pygeoprocessing.convolve_2d((valid_raster_path, 1),
                                    (mask_kernel_raster_path, 1),
                                    coverage_raster_path,
                                    mask_nodata=False,
                                    target_nodata=-1,
                                    target_datatype=gdal.GDT_Byte,
                                    working_dir=working_dir)

        # this raster will be filled with the entire convolution
        backfill_raster_path = os.path.join(working_dir,
                                            f'backfill_{basename}')
        base_nodata = base_raster_info['nodata'][0]
        if base_nodata is None:
            target_datatype = gdal.GDT_Float64
        else:
            target_datatype = base_raster_info['datatype']
        LOGGER.info(f'create backfill from {sanitized_base_raster_path} to '
                    f'{backfill_raster_path}')
        pygeoprocessing.convolve_2d((sanitized_base_raster_path, 1),
                                    (kernel_raster_path, 1),
                                    backfill_raster_path,
                                    ignore_nodata_and_edges=True,
                                    mask_nodata=False,
                                    normalize_kernel=True,
                                    target_nodata=base_nodata,
                                    target_datatype=target_datatype,
                                    working_dir=working_dir)

        LOGGER.info(
            f'fill nodata of {base_raster_path} to {backfill_raster_path}')
        pygeoprocessing.raster_calculator([(base_raster_path, 1),
                                           (backfill_raster_path, 1),
                                           (coverage_raster_path, 1),
                                           (base_nodata, 'raw')],
                                          _fill_nodata_op,
                                          target_filled_raster_path,
                                          base_raster_info['datatype'],
                                          base_nodata)
        shutil.rmtree(working_dir)
    except Exception:
        LOGGER.exception(
            f'error on fill by convolution {target_filled_raster_path}')
        raise
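`_mask_valid_op` and `_fill_nodata_op` are helpers defined elsewhere in the same module and are not shown here. Plausible sketches, inferred only from how they are called above (the bodies are assumptions, not the actual definitions):

def _mask_valid_op(array, nodata):
    """Return 1 where `array` is valid and 0 where it matches `nodata`."""
    if nodata is None:
        return numpy.ones(array.shape, dtype=numpy.int8)
    return (~numpy.isclose(array, nodata)).astype(numpy.int8)


def _fill_nodata_op(base, fill, coverage, nodata):
    """Use convolution `fill` values where `base` is nodata and coverage exists."""
    result = numpy.copy(base)
    if nodata is not None:
        fill_mask = numpy.isclose(base, nodata) & (coverage > 0)
        result[fill_mask] = fill[fill_mask]
    return result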
Code Example #24
                              habitat_mask_raster_path,
                              burn_values=[1],
                              option_list=['ALL_TOUCHED=TRUE'])

    # Make convolution kernel
    kernel_path = os.path.join(churn_dir, 'kernel.tif')
    # assume square pixels
    kernel_radius = int(args.protective_distance // target_pixel_size[0])
    LOGGER.info(f"kernel radius: {kernel_radius}")
    kernel_x, kernel_y = numpy.meshgrid(range((kernel_radius - 1) * 2 + 1),
                                        range((kernel_radius - 1) * 2 + 1))
    kernel_distance = numpy.sqrt((kernel_x - (kernel_radius - 1))**2 +
                                 (kernel_y - (kernel_radius - 1))**2)
    kernel_array = (kernel_distance <= kernel_radius).astype(numpy.int8)

    pygeoprocessing.numpy_array_to_raster(kernel_array, 0, (1, -1), (0, 0),
                                          None, kernel_path)

    # Convolve CV points for coverage
    convolve_target_raster_path = os.path.join(churn_dir, 'convolve_2d.tif')
    pygeoprocessing.convolve_2d((shore_point_raster_path, 1), (kernel_path, 1),
                                convolve_target_raster_path,
                                ignore_nodata_and_edges=False,
                                mask_nodata=False,
                                normalize_kernel=False,
                                target_datatype=gdal.GDT_Float64)

    target_habitat_value_raster_path = os.path.join(
        args.workspace_dir, args.target_habitat_value_raster_filename)

    # TODO: mask result to habitat
    mask_by_nodata(convolve_target_raster_path, habitat_mask_raster_path,
Code Example #25
    def test_base_regression_nodata_inf(self):
        """SWY base regression test on sample data with really small nodata.

        Executes SWY in default mode and checks that the output files are
        generated and that the aggregate shapefile fields are the same as the
        regression case.
        """
        from natcap.invest.seasonal_water_yield import seasonal_water_yield

        # use predefined directory so test can clean up files during teardown
        args = SeasonalWaterYieldRegressionTests.generate_base_args(
            self.workspace_dir)

        # Ensure the model can pass when a nodata value is not defined.
        size = 100
        lulc_array = numpy.zeros((size, size), dtype=numpy.int8)
        lulc_array[size // 2:, :] = 1

        driver = gdal.GetDriverByName('GTiff')
        # gdal's Create() takes (xsize, ysize), i.e. (columns, rows)
        new_raster = driver.Create(args['lulc_raster_path'],
                                   lulc_array.shape[1], lulc_array.shape[0], 1,
                                   gdal.GDT_Byte)
        band = new_raster.GetRasterBand(1)
        band.WriteArray(lulc_array)
        geotransform = [1180000, 1, 0, 690000, 0, -1]
        new_raster.SetGeoTransform(geotransform)
        band = None
        new_raster = None
        driver = None

        # set precip nodata values to a large, negative 64bit value.
        nodata = numpy.finfo(numpy.float64).min
        precip_nodata_dir = os.path.join(self.workspace_dir,
                                         'precip_nodata_dir')
        os.makedirs(precip_nodata_dir)
        size = 100
        for month in range(1, 13):
            precip_raster_path = os.path.join(
                precip_nodata_dir, 'precip_mm_' + str(month) + '.tif')
            precip_array = numpy.full((size, size),
                                      month + 10,
                                      dtype=numpy.float64)
            precip_array[size - 1, :] = nodata

            srs = osr.SpatialReference()
            srs.ImportFromEPSG(26910)  # UTM Zone 10N
            project_wkt = srs.ExportToWkt()

            # Each pixel is 1x1 m
            pygeoprocessing.numpy_array_to_raster(precip_array, nodata,
                                                  (1, -1), (1180000, 690000),
                                                  project_wkt,
                                                  precip_raster_path)

        args['precip_dir'] = precip_nodata_dir

        # make args explicit that this is a base run of SWY
        args['user_defined_climate_zones'] = False
        args['user_defined_local_recharge'] = False
        args['monthly_alpha'] = False
        args['results_suffix'] = ''

        seasonal_water_yield.execute(args)

        # generate aggregated results csv table for assertion
        agg_results_csv_path = os.path.join(args['workspace_dir'],
                                            'agg_results_base.csv')
        with open(agg_results_csv_path, 'w') as open_table:
            open_table.write('0,1.0,50.076062\n')

        SeasonalWaterYieldRegressionTests._assert_regression_results_equal(
            os.path.join(args['workspace_dir'], 'aggregated_results_swy.shp'),
            agg_results_csv_path)
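The class helper _assert_regression_results_equal is not included in this snippet. A rough sketch of such a comparison, assuming the CSV rows are fid,vri_sum,qb and the aggregate vector carries fields with those names (the field names and tolerance below are assumptions, not an excerpt of the original test suite):

import numpy
from osgeo import gdal


def _assert_results_close(vector_path, expected_csv_path, rel_tol=1e-6):
    """Sketch: compare per-feature fields against expected fid,vri_sum,qb rows."""
    expected = {}
    with open(expected_csv_path) as csv_file:
        for row in csv_file:
            fid, vri_sum, qb = row.strip().split(',')
            expected[int(fid)] = (float(vri_sum), float(qb))

    vector = gdal.OpenEx(vector_path, gdal.OF_VECTOR)
    layer = vector.GetLayer()
    for feature in layer:
        expected_vri, expected_qb = expected[feature.GetFID()]
        numpy.testing.assert_allclose(
            feature.GetField('vri_sum'), expected_vri, rtol=rel_tol)
        numpy.testing.assert_allclose(
            feature.GetField('qb'), expected_qb, rtol=rel_tol)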
Code example #26
0
    def _create_model_args(target_dir):
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(32731)  # WGS84 / UTM zone 31 S
        wkt = srs.ExportToWkt()

        biophysical_table = [
            [
                'code', 'lulc-class', 'biomass-initial', 'soil-initial',
                'litter-initial', 'biomass-half-life',
                'biomass-low-impact-disturb', 'biomass-med-impact-disturb',
                'biomass-high-impact-disturb', 'biomass-yearly-accumulation',
                'soil-half-life', 'soil-low-impact-disturb',
                'soil-med-impact-disturb', 'soil-high-impact-disturb',
                'soil-yearly-accumulation', 'litter-yearly-accumulation'
            ],
            [
                1,
                'mangrove',
                64,
                313,
                3,  # initial
                15,
                0.5,
                0.5,
                1,
                2,  # biomass
                7.5,
                0.3,
                0.5,
                0.66,
                5.35,  # soil
                1
            ],  # litter accum.
            [
                2,
                'parking lot',
                0,
                0,
                0,  # initial
                0,
                0,
                0,
                0,
                0,  # biomass
                0,
                0,
                0,
                0,
                0,  # soil
                0
            ],  # litter accum.
        ]
        biophysical_table_path = os.path.join(target_dir, 'biophysical.csv')
        with open(biophysical_table_path, 'w') as bio_table:
            for line_list in biophysical_table:
                line = ','.join(str(field) for field in line_list)
                bio_table.write(f'{line}\n')

        transition_matrix = [['lulc-class', 'mangrove', 'parking lot'],
                             ['mangrove', 'NCC', 'high-impact-disturb'],
                             ['parking lot', 'accum', 'NCC']]
        transition_matrix_path = os.path.join(target_dir, 'transitions.csv')
        with open(transition_matrix_path, 'w') as transition_table:
            for line_list in transition_matrix:
                line = ','.join(line_list)
                transition_table.write(f'{line}\n')

        baseline_landcover_raster_path = os.path.join(target_dir,
                                                      'baseline_lulc.tif')
        baseline_matrix = numpy.array([[1, 2]], dtype=numpy.uint8)
        pygeoprocessing.numpy_array_to_raster(baseline_matrix, 255, (2, -2),
                                              (2, -2), wkt,
                                              baseline_landcover_raster_path)

        snapshot_2010_raster_path = os.path.join(target_dir,
                                                 'snapshot_2010.tif')
        snapshot_2010_matrix = numpy.array([[2, 1]], dtype=numpy.uint8)
        pygeoprocessing.numpy_array_to_raster(snapshot_2010_matrix, 255,
                                              (2, -2), (2, -2), wkt,
                                              snapshot_2010_raster_path)

        snapshot_2020_raster_path = os.path.join(target_dir,
                                                 'snapshot_2020.tif')
        snapshot_2020_matrix = numpy.array([[1, 2]], dtype=numpy.uint8)
        pygeoprocessing.numpy_array_to_raster(snapshot_2020_matrix, 255,
                                              (2, -2), (2, -2), wkt,
                                              snapshot_2020_raster_path)

        snapshot_rasters_csv_path = os.path.join(target_dir,
                                                 'snapshot_rasters.csv')
        baseline_year = 2000
        with open(snapshot_rasters_csv_path, 'w') as snapshot_rasters_csv:
            snapshot_rasters_csv.write('snapshot_year,raster_path\n')
            snapshot_rasters_csv.write(
                f'{baseline_year},{baseline_landcover_raster_path}\n')
            snapshot_rasters_csv.write(f'2010,{snapshot_2010_raster_path}\n')
            snapshot_rasters_csv.write(f'2020,{snapshot_2020_raster_path}\n')

        args = {
            'landcover_transitions_table': transition_matrix_path,
            'landcover_snapshot_csv': snapshot_rasters_csv_path,
            'biophysical_table_path': biophysical_table_path,
            'analysis_year': 2030,
            'do_economic_analysis': True,
            'use_price_table': True,
            'price_table_path': os.path.join(target_dir, 'price_table.csv'),
            'discount_rate': 4,
        }

        with open(args['price_table_path'], 'w') as price_table:
            price_table.write('year,price\n')
            prior_year_price = 1.0
            for year in range(baseline_year, args['analysis_year'] + 1):
                price = prior_year_price * 1.04
                price_table.write(f'{year},{price}\n')
        return args
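For context, a hedged usage sketch follows: the returned args dict has no workspace_dir, so a caller would presumably add one (plus any other keys the model requires) before invoking the Coastal Blue Carbon model. The call below is illustrative, not an excerpt of the original tests:

from natcap.invest.coastal_blue_carbon import coastal_blue_carbon

model_args = _create_model_args(target_dir)  # target_dir: an existing scratch dir
model_args['workspace_dir'] = target_dir     # assumed output location
coastal_blue_carbon.execute(model_args)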
Code example #27
0
File: test_wind_energy.py Project: natcap/invest
    def test_calculate_npv_levelized_rasters(self):
        """WindEnergy: testing '_calculate_npv_levelized_rasters' function."""
        from natcap.invest import wind_energy

        val_parameters_dict = {
            'air_density': 1.225,
            'exponent_power_curve': 2,
            'decommission_cost': 0.03,
            'operation_maintenance_cost': 0.03,
            'miscellaneous_capex_cost': 0.05,
            'installation_cost': 0.2,
            'infield_cable_length': 0.9,
            'infield_cable_cost': 260000,
            'mw_coef_ac': 810000,
            'mw_coef_dc': 1090000,
            'cable_coef_ac': 1360000,
            'cable_coef_dc': 890000,
            'ac_dc_distance_break': 60,
            'time_period': 5,
            'rotor_diameter_factor': 7,
            'carbon_coefficient': 6.90E-04,
            'air_density_coefficient': 1.19E-04,
            'loss_parameter': 0.05,
            'turbine_cost': 10000,
            'turbine_rated_pwr': 5
        }
        args = {
            'foundation_cost': 1000000,
            'discount_rate': 0.01,
            'number_of_turbines': 10
        }
        price_list = [0.10, 0.10, 0.10, 0.10, 0.10]

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(3157)  # NAD83(CSRS) / UTM zone 10N
        projection_wkt = srs.ExportToWkt()
        origin = (443723.127327877911739, 4956546.905980412848294)
        pos_x = origin[0]
        pos_y = origin[1]

        # Create harvested raster
        harvest_val = 1000000
        harvest_matrix = numpy.array([
            [
                harvest_val, harvest_val + 1e5, harvest_val + 2e5,
                harvest_val + 3e5, harvest_val + 4e5
            ],
            [
                harvest_val, harvest_val + 1e5, harvest_val + 2e5,
                harvest_val + 3e5, harvest_val + 4e5
            ],
        ],
                                     dtype=numpy.float32)
        base_harvest_path = os.path.join(self.workspace_dir,
                                         'harvest_raster.tif')
        # Create raster to use for testing input
        pygeoprocessing.numpy_array_to_raster(harvest_matrix, -1, (10, -10),
                                              origin, projection_wkt,
                                              base_harvest_path)
        # Create distance raster
        dist_matrix = numpy.array([[0, 10, 20, 30, 40], [0, 10, 20, 30, 40]],
                                  dtype=numpy.float32)
        base_distance_path = os.path.join(self.workspace_dir,
                                          'dist_raster.tif')
        # Create raster to use for testing input
        pygeoprocessing.numpy_array_to_raster(dist_matrix, -1, (10, -10),
                                              origin, projection_wkt,
                                              base_distance_path)

        target_npv_raster_path = os.path.join(self.workspace_dir, 'npv.tif')
        target_levelized_raster_path = os.path.join(self.workspace_dir,
                                                    'levelized.tif')
        # Call function to test given testing inputs
        wind_energy._calculate_npv_levelized_rasters(
            base_harvest_path, base_distance_path, target_npv_raster_path,
            target_levelized_raster_path, val_parameters_dict, args,
            price_list)

        # Compare the results that were "eye" tested.
        desired_npv_array = numpy.array([[
            309332320.0, 348331200.0, 387330020.0, 426328930.0, 465327800.0
        ], [309332320.0, 348331200.0, 387330020.0, 426328930.0, 465327800.0]],
                                        dtype=numpy.float32)
        actual_npv_array = pygeoprocessing.raster_to_numpy_array(
            target_npv_raster_path)
        numpy.testing.assert_allclose(actual_npv_array, desired_npv_array)

        desired_levelized_array = numpy.array([[
            0.016496297, 0.015000489, 0.0137539795, 0.01269924, 0.011795178
        ], [0.016496297, 0.015000489, 0.0137539795, 0.01269924, 0.011795178]],
                                              dtype=numpy.float32)
        actual_levelized_array = pygeoprocessing.raster_to_numpy_array(
            target_levelized_raster_path)
        numpy.testing.assert_allclose(actual_levelized_array,
                                      desired_levelized_array)
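The expected arrays above were verified by inspection rather than derived analytically. For orientation only, the snippet below sketches generic net-present-value and levelized-cost arithmetic over a fixed price list; the actual InVEST wind energy formulas also include cable, foundation, installation, O&M and decommissioning terms, so this is not the model's implementation:

def simple_npv_and_levelized(annual_energy_kwh, prices, discount_rate,
                             capital_cost):
    """Generic sketch: discount each year's revenue and energy to present."""
    discounted_revenue = sum(
        price * annual_energy_kwh / (1 + discount_rate)**year
        for year, price in enumerate(prices, start=1))
    discounted_energy = sum(
        annual_energy_kwh / (1 + discount_rate)**year
        for year in range(1, len(prices) + 1))
    npv = discounted_revenue - capital_cost
    levelized_cost_per_kwh = capital_cost / discounted_energy
    return npv, levelized_cost_per_kwh


# e.g. 1,000,000 kWh/year for 5 years at $0.10/kWh, 1% discount, $300k capital
print(simple_npv_and_levelized(1_000_000, [0.10] * 5, 0.01, 300_000))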
Code example #28
0
    def test_watersheds_diagnostic_vector(self):
        """PGP watersheds: test diagnostic vector."""
        flow_dir_array = numpy.array(
            [[6, 6, 6, 6, 6, 6, 6, 6, 6, 6], [6, 6, 6, 6, 6, 6, 6, 6, 6, 6],
             [6, 6, 6, 6, 6, 6, 6, 6, 6, 6], [6, 6, 6, 6, 6, 6, 6, 6, 6, 255],
             [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [2, 2, 2, 2, 2, 2, 2, 2, 2, 255],
             [2, 2, 2, 2, 2, 2, 2, 2, 2, 2], [2, 2, 2, 2, 2, 2, 2, 2, 2, 2],
             [2, 2, 2, 2, 2, 2, 2, 2, 2, 2], [2, 2, 2, 2, 2, 2, 2, 2, 2, 2]],
            dtype=numpy.int8)

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(32731)  # WGS84 / UTM zone 31S
        srs_wkt = srs.ExportToWkt()

        flow_dir_path = os.path.join(self.workspace_dir, 'flow_dir.tif')
        pygeoprocessing.numpy_array_to_raster(base_array=flow_dir_array,
                                              target_nodata=255,
                                              pixel_size=(2, -2),
                                              origin=(2, -2),
                                              projection_wkt=srs_wkt,
                                              target_path=flow_dir_path)

        # These geometries test:
        #  * Delineation works with varying geometry types
        #  * That we exclude seed pixels that are over nodata
        #  * That we exclude seed pixels off the bounds of the raster
        horizontal_line = shapely.geometry.LineString([(19, -11), (25, -11)])
        vertical_line = shapely.geometry.LineString([(21, -9), (21, -13)])
        square = shapely.geometry.box(17, -13, 21, -9)
        point = shapely.geometry.Point(21, -11)

        outflow_vector_path = os.path.join(self.workspace_dir, 'outflow.gpkg')
        pygeoprocessing.shapely_geometry_to_vector(
            [horizontal_line, vertical_line, square, point],
            outflow_vector_path,
            srs_wkt,
            'GPKG', {
                'polygon_id': ogr.OFTInteger,
                'field_string': ogr.OFTString,
                'other': ogr.OFTReal
            }, [{
                'polygon_id': 1,
                'field_string': 'hello world',
                'other': 1.111
            }, {
                'polygon_id': 2,
                'field_string': 'hello foo',
                'other': 2.222
            }, {
                'polygon_id': 3,
                'field_string': 'hello bar',
                'other': 3.333
            }, {
                'polygon_id': 4,
                'field_string': 'hello baz',
                'other': 4.444
            }],
            ogr_geom_type=ogr.wkbUnknown)

        target_watersheds_path = os.path.join(self.workspace_dir,
                                              'watersheds.gpkg')

        pygeoprocessing.routing.delineate_watersheds_d8(
            (flow_dir_path, 1),
            outflow_vector_path,
            target_watersheds_path,
            write_diagnostic_vector=True,
            working_dir=self.workspace_dir,
            remove_temp_files=False)

        # I'm deliberately only testing that the diagnostic files exist, not
        # the contents.  The diagnostic files should be for debugging only,
        # so I just want to make sure that they're created.
        num_diagnostic_files = len(
            glob.glob(os.path.join(self.workspace_dir, '**/*_seeds.gpkg')))
        self.assertEqual(num_diagnostic_files, 3)  # 3 features valid
Code example #29
0
    def test_watersheds_trivial(self):
        """PGP watersheds: test trivial delineation."""
        flow_dir_array = numpy.array(
            [[6, 6, 6, 6, 6, 6, 6, 6, 6, 6], [6, 6, 6, 6, 6, 6, 6, 6, 6, 6],
             [6, 6, 6, 6, 6, 6, 6, 6, 6, 6], [6, 6, 6, 6, 6, 6, 6, 6, 6, 255],
             [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [2, 2, 2, 2, 2, 2, 2, 2, 2, 255],
             [2, 2, 2, 2, 2, 2, 2, 2, 2, 2], [2, 2, 2, 2, 2, 2, 2, 2, 2, 2],
             [2, 2, 2, 2, 2, 2, 2, 2, 2, 2], [2, 2, 2, 2, 2, 2, 2, 2, 2, 2]],
            dtype=numpy.int8)

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(32731)  # WGS84 / UTM zone 31S
        srs_wkt = srs.ExportToWkt()

        flow_dir_path = os.path.join(self.workspace_dir, 'flow_dir.tif')
        pygeoprocessing.numpy_array_to_raster(base_array=flow_dir_array,
                                              target_nodata=255,
                                              pixel_size=(2, -2),
                                              origin=(2, -2),
                                              projection_wkt=srs_wkt,
                                              target_path=flow_dir_path)

        # These geometries test:
        #  * Delineation works with varying geometry types
        #  * That we exclude seed pixels that are over nodata
        #  * That we exclude seed pixels off the bounds of the raster
        horizontal_line = shapely.geometry.LineString([(19, -11), (25, -11)])
        vertical_line = shapely.geometry.LineString([(21, -9), (21, -13)])
        square = shapely.geometry.box(17, -13, 21, -9)
        point = shapely.geometry.Point(21, -11)

        outflow_vector_path = os.path.join(self.workspace_dir, 'outflow.gpkg')
        pygeoprocessing.shapely_geometry_to_vector(
            [horizontal_line, vertical_line, square, point],
            outflow_vector_path,
            srs_wkt,
            'GPKG', {
                'polygon_id': ogr.OFTInteger,
                'field_string': ogr.OFTString,
                'other': ogr.OFTReal
            }, [{
                'polygon_id': 1,
                'field_string': 'hello world',
                'other': 1.111
            }, {
                'polygon_id': 2,
                'field_string': 'hello foo',
                'other': 2.222
            }, {
                'polygon_id': 3,
                'field_string': 'hello bar',
                'other': 3.333
            }, {
                'polygon_id': 4,
                'field_string': 'hello baz',
                'other': 4.444
            }],
            ogr_geom_type=ogr.wkbUnknown)

        target_watersheds_path = os.path.join(self.workspace_dir,
                                              'watersheds.gpkg')

        pygeoprocessing.routing.delineate_watersheds_d8(
            (flow_dir_path, 1),
            outflow_vector_path,
            target_watersheds_path,
            target_layer_name='watersheds_something')

        watersheds_vector = gdal.OpenEx(target_watersheds_path, gdal.OF_VECTOR)
        watersheds_layer = watersheds_vector.GetLayer('watersheds_something')
        self.assertEqual(watersheds_layer.GetFeatureCount(), 4)

        # All features should have the same watersheds, both in area and
        # geometry.
        flow_dir_bbox = pygeoprocessing.get_raster_info(
            flow_dir_path)['bounding_box']
        expected_watershed_geometry = shapely.geometry.box(*flow_dir_bbox)
        expected_watershed_geometry = expected_watershed_geometry.difference(
            shapely.geometry.box(20, -2, 22, -10))
        expected_watershed_geometry = expected_watershed_geometry.difference(
            shapely.geometry.box(20, -12, 22, -22))
        pygeoprocessing.shapely_geometry_to_vector(
            [expected_watershed_geometry],
            os.path.join(self.workspace_dir, 'foo.gpkg'),
            srs_wkt,
            'GPKG',
            ogr_geom_type=ogr.wkbGeometryCollection)

        id_to_fields = {}
        for feature in watersheds_layer:
            geometry = feature.GetGeometryRef()
            shapely_geom = shapely.wkb.loads(geometry.ExportToWkb())
            self.assertEqual(shapely_geom.area,
                             expected_watershed_geometry.area)
            self.assertEqual(
                shapely_geom.intersection(expected_watershed_geometry).area,
                expected_watershed_geometry.area)
            self.assertEqual(
                shapely_geom.difference(expected_watershed_geometry).area, 0)

            field_values = feature.items()
            id_to_fields[field_values['polygon_id']] = field_values

        outflow_vector = gdal.OpenEx(outflow_vector_path, gdal.OF_VECTOR)
        outflow_layer = outflow_vector.GetLayer()
        try:
            for feature in outflow_layer:
                self.assertEqual(id_to_fields[feature.GetField('polygon_id')],
                                 feature.items())
        finally:
            outflow_layer = None
            outflow_vector = None