Example #1
    def testSqliteWriting(self):
        """Test that writing a dataset to sqlite works."""
        keywords = read_keywords(SHP_BASE + '.keywords')
        layer = Vector(data=SHP_BASE + '.shp', keywords=keywords)
        test_dir = temp_dir(sub_dir='test')
        test_file = unique_filename(suffix='.sqlite', dir=test_dir)
        layer.write_to_file(test_file, sublayer='foo')
Example #2
def write_vector_data(data, projection, geometry, filename, keywords=None):
    """Write point data and any associated attributes to vector file

    Input:
        data: List of N dictionaries each with M fields where
              M is the number of attributes.
              A value of None is acceptable.
        projection: WKT projection information
        geometry: List of points or polygons.
        filename: Output filename
        keywords: Optional dictionary

    Note: The only formats implemented are GML and SHP, so the extension
    must be either .gml or .shp

    # FIXME (Ole): When the GML driver is used,
    #              the spatial reference is not stored.
    #              I suspect this is a bug in OGR.

    Background:
    * http://www.gdal.org/ogr/ogr_apitut.html (last example)
    * http://invisibleroads.com/tutorials/gdal-shapefile-points-save.html
    """

    V = Vector(data, projection, geometry, keywords=keywords)
    V.write_to_file(filename)
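The docstring above describes the expected arguments; the snippet below is a minimal usage sketch, not part of the original module. The coordinates, attribute values and the minimal WGS 84 WKT string are illustrative assumptions, and write_vector_data is assumed to be importable from the module that defines it.

# Hypothetical usage of write_vector_data (illustrative values only)
wgs84_wkt = ('GEOGCS["WGS 84",DATUM["WGS_1984",'
             'SPHEROID["WGS 84",6378137,298.257223563]],'
             'PRIMEM["Greenwich",0],'
             'UNIT["degree",0.0174532925199433]]')

# Two point features, each with the same attribute fields
data = [{'name': 'A', 'depth': 0.5},
        {'name': 'B', 'depth': 1.2}]
geometry = [[106.80, -6.20], [106.81, -6.21]]

# The extension must be .shp or .gml as noted in the docstring
write_vector_data(data, wgs84_wkt, geometry, 'points.shp',
                  keywords={'title': 'Example points'})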
Example #4
    def test_sqlite_writing(self):
        """Test that writing a dataset to sqlite works."""
        keywords = {}
        layer = Vector(data=SHP_BASE + '.shp', keywords=keywords)
        test_dir = temp_dir(sub_dir='test')
        test_file = unique_filename(suffix='.sqlite', dir=test_dir)
        layer.write_to_file(test_file, sublayer='foo')
        self.assertTrue(os.path.exists(test_file))
Example #5
    def test_sqlite_writing(self):
        """Test that writing a dataset to sqlite works."""
        keywords = read_keywords(SHP_BASE + ".keywords")
        layer = Vector(data=SHP_BASE + ".shp", keywords=keywords)
        test_dir = temp_dir(sub_dir="test")
        test_file = unique_filename(suffix=".sqlite", dir=test_dir)
        layer.write_to_file(test_file, sublayer="foo")
        self.assertTrue(os.path.exists(test_file))
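The tests above only assert that the output file exists. If a stronger check is wanted, the following sketch (an assumption, not part of the original tests) uses Python's standard sqlite3 module to confirm that the written file is a readable SQLite database containing at least one table.

import sqlite3


def list_sqlite_tables(path):
    """Return the names of the tables stored in a SQLite file."""
    connection = sqlite3.connect(path)
    try:
        rows = connection.execute(
            "SELECT name FROM sqlite_master WHERE type='table'").fetchall()
        return [name for (name,) in rows]
    finally:
        connection.close()

# Possible extra assertion inside the test, after write_to_file:
#     self.assertTrue(len(list_sqlite_tables(test_file)) > 0)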
Example #7
    def test_clip_points_by_polygons_with_holes0(self):
        """Points can be clipped by polygons with holes
        """

        # Define an outer ring
        outer_ring = numpy.array([[106.79, -6.233], [106.80, -6.24],
                                  [106.78, -6.23], [106.77, -6.21],
                                  [106.79, -6.233]])

        # Define inner rings
        inner_rings = [
            numpy.array([[106.77827, -6.2252], [106.77775, -6.22378],
                         [106.78, -6.22311], [106.78017, -6.22530],
                         [106.77827, -6.2252]])[::-1],
            numpy.array([[106.78652, -6.23215], [106.78642, -6.23075],
                         [106.78746, -6.23143], [106.78831, -6.23307],
                         [106.78652, -6.23215]])[::-1]
        ]

        v = Vector(
            geometry=[Polygon(outer_ring=outer_ring, inner_rings=inner_rings)])
        assert v.is_polygon_data

        # Write it to file
        tmp_filename = unique_filename(suffix='.shp')
        v.write_to_file(tmp_filename)

        # Read the polygon back
        L = read_layer(tmp_filename)
        P = L.get_geometry(as_geometry_objects=True)[0]

        outer_ring = P.outer_ring
        inner_ring0 = P.inner_rings[0]
        inner_ring1 = P.inner_rings[1]

        # Make some test points
        points = generate_random_points_in_bbox(outer_ring, 1000, seed=13)

        # Clip to outer ring, excluding holes
        indices = inside_polygon(points, P.outer_ring, holes=P.inner_rings)

        # Sanity
        for point in points[indices, :]:
            # Must be inside outer ring
            assert is_inside_polygon(point, outer_ring)

            # But not in any of the inner rings
            assert not is_inside_polygon(point, inner_ring0)
            assert not is_inside_polygon(point, inner_ring1)

        if False:
            # Store for visual check
            pol = Vector(geometry=[P])
            tmp_filename = unique_filename(suffix='.shp')
            pol.write_to_file(tmp_filename)
            # print 'Polygon with holes written to %s' % tmp_filename

            pts = Vector(geometry=points[indices, :])
            tmp_filename = unique_filename(suffix='.shp')
            pts.write_to_file(tmp_filename)
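The test relies on the project's inside_polygon and is_inside_polygon helpers. As a conceptual stand-in (not the project's implementation, and possibly differing on points that lie exactly on a ring edge), the same "inside the outer ring but outside every hole" rule can be sketched with matplotlib's Path:

import numpy
from matplotlib.path import Path


def inside_polygon_with_holes(points, outer_ring, holes=()):
    """Indices of points inside outer_ring but outside all holes."""
    inside = Path(outer_ring).contains_points(points)
    for hole in holes:
        inside &= ~Path(hole).contains_points(points)
    return numpy.where(inside)[0]

# e.g. indices = inside_polygon_with_holes(points, P.outer_ring,
#                                          holes=P.inner_rings)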
Example #8
        #new_geom = []
        #new_data = []
        #
        #for i, d in enumerate(data):
        #    if d['affected']:
        #        g = geom[i]
        #        new_geom.append(g)
        #        new_data.append(d)

        # Keep all polygons
        new_geom = res.get_geometry()
        new_data = res.get_data()

        date = os.path.split(args.filename)[-1].split('_')[0]
        v = Vector(geometry=new_geom,
                   data=new_data,
                   projection=res.projection,
                   keywords={
                       'category':
                       'hazard',
                       'subcategory':
                       'flood',
                       'title': ('%d hour flood forecast regions '
                                 'in Jakarta at %s' % (args.hours, date))
                   })

        polyforecast_filename = (os.path.splitext(tif_filename)[0] +
                                 '_regions.shp')
        v.write_to_file(polyforecast_filename)
        print 'Wrote tagged polygons to %s' % polyforecast_filename
Example #9
    def test_tag_regions_by_flood(self):
        """Regions can be tagged correctly with data from flood forecasts
        """

        threshold = 0.3
        label = 'affected'

        tif_filename = convert_netcdf2tif(self.nc_filename, 24, verbose=False)
        region_filename = os.path.join(TESTDATA, 'rw_jakarta_singlepart.shp')

        grid = read_layer(tif_filename)
        polygons = read_layer(region_filename)

        res = tag_polygons_by_grid(polygons, grid,
                                   threshold=threshold,
                                   tag=label)
        os.remove(tif_filename)
        geom = res.get_geometry()
        data = res.get_data()

        # Check correctness of affected regions
        affected_geom = []
        affected_data = []
        for i, d in enumerate(data):
            if d[label]:
                g = geom[i]
                affected_geom.append(g)
                affected_data.append(d)

        assert len(affected_geom) == 37
        assert len(affected_data) == 37

        # Check that every grid point exceeding threshold lies inside
        # one of the polygons marked as affected
        P, V = grid.to_vector_points()

        flooded_points_geom = []
        flooded_points_data = []
        for i, point in enumerate(P):
            val = V[i]
            if val > threshold:
                # Point that is flooded must be in one of the tagged polygons
                found = False
                for polygon in affected_geom:
                    if is_inside_polygon(point, polygon):
                        found = True
                msg = ('No affected polygon was found for point [%f, %f] '
                       'with value %f' % (point[0], point[1], val))
                verify(found, msg)

                # Collect flooded points for visualisation
                flooded_points_geom.append(point)
                flooded_points_data.append({'depth': val})

        # To generate files for visual inspection.
        # See
# https://raw.github.com/AIFDR/inasafe/master/files/flood_tagging_test.png
# https://github.com/AIFDR/inasafe/blob/master/files/flood_tagging_test.tgz

        tmp_filename = unique_filename(prefix='grid', suffix='.tif')
        grid.write_to_file(tmp_filename)
        #print 'Grid written to ', tmp_filename

        tmp_filename = unique_filename(prefix='regions', suffix='.shp')
        res.write_to_file(tmp_filename)
        #print 'Regions written to ', tmp_filename

        tmp_filename = unique_filename(prefix='flooded_points', suffix='.shp')
        v = Vector(geometry=flooded_points_geom, data=flooded_points_data)
        v.write_to_file(tmp_filename)
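The test above exercises tag_polygons_by_grid, which attaches a boolean attribute to each region. A conceptual stand-in for the tagging rule it checks (not the project's implementation, which also builds and returns a Vector layer) could look like this, reusing the is_inside_polygon helper seen in the test:

def tag_polygons(polygon_geoms, points, values, threshold, tag='affected'):
    """Mark each polygon if any grid point above threshold falls inside it.

    points and values are assumed to come from grid.to_vector_points().
    """
    tagged = []
    for polygon in polygon_geoms:
        hit = any(value > threshold and is_inside_polygon(point, polygon)
                  for point, value in zip(points, values))
        tagged.append({tag: hit})
    return tagged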
Example #10
        # but will reduce overall bounding box for buildings under
        # consideration)
        # geom = res.get_geometry()
        # data = res.get_data()
        # new_geom = []
        # new_data = []
        #
        # for i, d in enumerate(data):
        #    if d['affected']:
        #        g = geom[i]
        #        new_geom.append(g)
        #        new_data.append(d)

        # Keep all polygons
        new_geom = res.get_geometry()
        new_data = res.get_data()

        date = os.path.split(args.filename)[-1].split('_')[0]
        v = Vector(geometry=new_geom, data=new_data,
                   projection=res.projection,
                   keywords={'category': 'hazard',
                             'subcategory': 'flood',
                             'title': ('%d hour flood forecast regions '
                                       'in Jakarta at %s' % (args.hours,
                                                             date))})

        polyforecast_filename = (os.path.splitext(tif_filename)[0] +
                                 '_regions.shp')
        v.write_to_file(polyforecast_filename)
        print 'Wrote tagged polygons to %s' % polyforecast_filename
Example #11
    def test_clip_points_by_polygons_with_holes0(self):
        """Points can be clipped by polygons with holes
        """

        # Define an outer ring
        outer_ring = numpy.array([[106.79, -6.233],
                                  [106.80, -6.24],
                                  [106.78, -6.23],
                                  [106.77, -6.21],
                                  [106.79, -6.233]])

        # Define inner rings
        inner_rings = [numpy.array([[106.77827, -6.2252],
                                    [106.77775, -6.22378],
                                    [106.78, -6.22311],
                                    [106.78017, -6.22530],
                                    [106.77827, -6.2252]])[::-1],
                       numpy.array([[106.78652, -6.23215],
                                    [106.78642, -6.23075],
                                    [106.78746, -6.23143],
                                    [106.78831, -6.23307],
                                    [106.78652, -6.23215]])[::-1]]

        v = Vector(geometry=[Polygon(outer_ring=outer_ring,
                                     inner_rings=inner_rings)])
        assert v.is_polygon_data

        # Write it to file
        tmp_filename = unique_filename(suffix='.shp')
        v.write_to_file(tmp_filename)

        # Read the polygon back
        L = read_layer(tmp_filename)
        P = L.get_geometry(as_geometry_objects=True)[0]

        outer_ring = P.outer_ring
        inner_ring0 = P.inner_rings[0]
        inner_ring1 = P.inner_rings[1]

        # Make some test points
        points = generate_random_points_in_bbox(outer_ring, 1000, seed=13)

        # Clip to outer ring, excluding holes
        indices = inside_polygon(points, P.outer_ring, holes=P.inner_rings)

        # Sanity
        for point in points[indices, :]:
            # Must be inside outer ring
            assert is_inside_polygon(point, outer_ring)

            # But not in any of the inner rings
            assert not is_inside_polygon(point, inner_ring0)
            assert not is_inside_polygon(point, inner_ring1)

        if False:
            # Store for visual check
            pol = Vector(geometry=[P])
            tmp_filename = unique_filename(suffix='.shp')
            pol.write_to_file(tmp_filename)
            print 'Polygon with holes written to %s' % tmp_filename

            pts = Vector(geometry=points[indices, :])
            tmp_filename = unique_filename(suffix='.shp')
            pts.write_to_file(tmp_filename)
            print 'Clipped points written to %s' % tmp_filename
Example #12
    def test_clip_points_by_polygons_with_holes_real(self):
        """Points can be clipped by polygons with holes (real data)
        """

        # Read real polygon with holes
        filename = '%s/%s' % (TESTDATA, 'donut.shp')
        L = read_layer(filename)

        # --------------------------------------------
        # Pick one polygon that has 2 inner rings
        P = L.get_geometry(as_geometry_objects=True)[1]

        outer_ring = P.outer_ring
        inner_ring0 = P.inner_rings[0]
        inner_ring1 = P.inner_rings[1]

        # Make some test points
        points_in_bbox = generate_random_points_in_bbox(outer_ring, 1000)
        points_in_inner_ring0 = populate_polygon(inner_ring0, 2, seed=13)
        points_in_inner_ring1 = populate_polygon(inner_ring1, 2, seed=17)
        points = numpy.concatenate((points_in_bbox,
                                    points_in_inner_ring0,
                                    points_in_inner_ring1))

        # Clip
        indices = inside_polygon(points, P.outer_ring, holes=P.inner_rings)

        # Sanity
        for point in points[indices, :]:
            # Must be inside outer ring
            assert is_inside_polygon(point, outer_ring)

            # But not in any of the inner rings
            assert not is_inside_polygon(point, inner_ring0)
            assert not is_inside_polygon(point, inner_ring1)

        # ---------------------------------------------------------
        # Pick a polygon that has 1 inner ring (nice visualisation)
        P = L.get_geometry(as_geometry_objects=True)[9]

        outer_ring = P.outer_ring
        inner_ring = P.inner_rings[0]

        # Make some test points
        points = generate_random_points_in_bbox(outer_ring, 500)

        # Clip
        indices = inside_polygon(points, P.outer_ring, holes=P.inner_rings)

        # Sanity
        for point in points[indices, :]:
            # Must be inside outer ring
            assert is_inside_polygon(point, outer_ring)

            # But not in the inner ring
            assert not is_inside_polygon(point, inner_ring)

        # Store for visual check (nice one!)
        # Comment out the os.remove calls if you want to see the layers
        pol = Vector(geometry=[P])
        tmp_filename = unique_filename(suffix='.shp')
        pol.write_to_file(tmp_filename)
        # print 'Polygon with holes written to %s' % tmp_filename
        os.remove(tmp_filename)

        pts = Vector(geometry=points[indices, :])
        tmp_filename = unique_filename(suffix='.shp')
        pts.write_to_file(tmp_filename)
        # print 'Clipped points written to %s' % tmp_filename
        os.remove(tmp_filename)
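The test builds its test points with generate_random_points_in_bbox and populate_polygon. A minimal sketch of the bounding-box idea (not the project's implementation) is:

import numpy


def random_points_in_bbox(ring, number, seed=None):
    """Uniform random points in the axis-aligned bounding box of a ring."""
    rng = numpy.random.RandomState(seed)
    xmin, ymin = ring.min(axis=0)
    xmax, ymax = ring.max(axis=0)
    x = rng.uniform(xmin, xmax, number)
    y = rng.uniform(ymin, ymax, number)
    return numpy.column_stack((x, y))

# e.g. points_in_bbox = random_points_in_bbox(outer_ring, 1000, seed=13)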
Example #13
    def test_tag_regions_by_flood(self):
        """Regions can be tagged correctly with data from flood forecasts.
        """
        threshold = 0.3
        label = 'affected'

        tif_filename = convert_netcdf2tif(self.nc_filename, 24, verbose=False)
        region_filename = os.path.join(TESTDATA, 'rw_jakarta_singlepart.shp')

        grid = read_layer(tif_filename)
        polygons = read_layer(region_filename)

        res = tag_polygons_by_grid(polygons, grid,
                                   threshold=threshold,
                                   tag=label)
        os.remove(tif_filename)
        geom = res.get_geometry()
        data = res.get_data()

        # Check correctness of affected regions
        affected_geom = []
        affected_data = []
        for i, d in enumerate(data):
            if d[label]:
                g = geom[i]
                affected_geom.append(g)
                affected_data.append(d)

        assert len(affected_geom) == 37
        assert len(affected_data) == 37

        # Check that every grid point exceeding threshold lies inside
        # one of the polygons marked as affected
        P, V = grid.to_vector_points()

        flooded_points_geom = []
        flooded_points_data = []
        for i, point in enumerate(P):
            val = V[i]
            if val > threshold:
                # Point that is flooded must be in one of the tagged polygons
                found = False
                for polygon in affected_geom:
                    if is_inside_polygon(point, polygon):
                        found = True
                msg = ('No affected polygon was found for point [%f, %f] '
                       'with value %f' % (point[0], point[1], val))
                verify(found, msg)

                # Collect flooded points for visualisation
                flooded_points_geom.append(point)
                flooded_points_data.append({'depth': val})

        # To generate files for visual inspection.
        # See
# https://raw.github.com/AIFDR/inasafe/master/files/flood_tagging_test.png
# https://github.com/AIFDR/inasafe/blob/master/files/flood_tagging_test.tgz

        tmp_filename = unique_filename(prefix='grid', suffix='.tif')
        grid.write_to_file(tmp_filename)
        #print 'Grid written to ', tmp_filename

        tmp_filename = unique_filename(prefix='regions', suffix='.shp')
        res.write_to_file(tmp_filename)
        #print 'Regions written to ', tmp_filename

        tmp_filename = unique_filename(prefix='flooded_points', suffix='.shp')
        v = Vector(geometry=flooded_points_geom, data=flooded_points_data)
        v.write_to_file(tmp_filename)
Example #14
def processFloodEvent(netcdf_file=None, hours=24):
    """A function to process netcdf_file to a forecast file.
    """
    print 'Start flood forecasting'

    if netcdf_file is None:
        # retrieve data from the web
        netcdf_file = download_file_url(netcdf_url, forecast_directory)
    else:
        netcdf_file = download_file_url(netcdf_url, name=netcdf_file,
            download_directory=forecast_directory)
    print 'Do flood forecasting for %s ...' % netcdf_file

#    # check if a forecasting file has been created or not
#    is_exist, polyforecast_filepath = get_result_file_name(netcdf_file, hours)
#
#    if is_exist:
#        print 'Current flood forecasting has been already created.'
#        print 'You can look it at %s' % polyforecast_filepath
#        return

    # convert to tif
#    tif_file = polyforecast_filepath.replace('_regions.shp', '.tif')
    tif_filename = convert_netcdf2tif(netcdf_file, hours,
            verbose=False, output_dir=flood_directory)
    print 'tif_file', tif_filename
    tif_file = read_layer(tif_filename)

    # check if there is another file with the same name
    # if so, do not do the forecasting
    polyforecast_filepath = tif_filename.replace('.tif', '_regions.shp')
    zip_filename = polyforecast_filepath.replace('.shp', '.zip')
    if os.path.isfile(zip_filename):
        print ('File %s exists, so we do not do the forecasting'
               % zip_filename)
    else:
        my_polygons = read_layer(polygons_path)
        my_result = tag_polygons_by_grid(my_polygons, tif_file, threshold=0.3,
            tag='affected')

        new_geom = my_result.get_geometry()
        new_data = my_result.get_data()

        date = os.path.split(netcdf_file)[-1].split('_')[0]

        v = Vector(geometry=new_geom, data=new_data,
            projection=my_result.projection,
            keywords={'category': 'hazard',
                      'subcategory': 'flood',
                      'title': ('%d hour flood forecast regions '
                                'in Jakarta at %s' % (hours,
                                                      date))})

        print 'polyforecast_filepath', polyforecast_filepath
        v.write_to_file(polyforecast_filepath)
        print 'Wrote tagged polygons to %s' % polyforecast_filepath

    # Zip all files
    if os.path.isfile(zip_filename):
        print 'Has been zipped to %s' % zip_filename
    else:
        zip_shp(polyforecast_filepath, extra_ext=['.keywords'],
            remove_file=True)
        print 'Zipped to %s' % zip_filename
Example #15
    def test_clip_raster_by_polygons(self):
        """Raster grids can be clipped by polygon layers

        # See qgis project in test data: raster_point_and_clipping_test.qgs
        """

        # Name input files
        poly = join(TESTDATA, 'kabupaten_jakarta_singlepart.shp')
        grid = join(TESTDATA, 'population_5x5_jakarta.asc')

        # Get layers using API
        P = read_layer(poly)
        R = read_layer(grid)

        M = len(P)
        N = len(R)
        assert N == 56

        # Clip
        C = clip_raster_by_polygons(R, P)
        assert len(C) == M

        # Check points inside polygon
        tot = 0
        for c in C:
            tot += len(c)
        assert tot == 14

        # Check that points are inside the right polygon
        for i, polygon in enumerate(P.get_geometry()):

            points = C[i][0]
            values = C[i][1]

            # Sanity first
            for point in points:
                assert is_inside_polygon(point, polygon)

            # Specific tests against raster pixel values inside polygons
            # The values are read from qgis
            if i == 0:
                assert len(points) == 6
                assert numpy.allclose(values[0], 200951)
                assert numpy.allclose(values[1], 283237)
                assert numpy.allclose(values[2], 278385)
                assert numpy.allclose(values[3], 516061)
                assert numpy.allclose(values[4], 207414)
                assert numpy.allclose(values[5], 344466)

            elif i == 1:
                assert len(points) == 2
                msg = ('Got wrong coordinates %s, expected %s' %
                       (str(points[0, :]), str([106.8125, -6.1875])))
                assert numpy.allclose(points[0, :], [106.8125, -6.1875]), msg
                assert numpy.allclose(points[1, :], [106.8541667, -6.1875])
                assert numpy.allclose(values[0], 331942)
                assert numpy.allclose(values[1], 496446)
            elif i == 2:
                assert len(points) == 7
                assert numpy.allclose(values[0], 268579)
                assert numpy.allclose(values[1], 155795)
                assert numpy.allclose(values[2], 403674)
                assert numpy.allclose(values[3], 259280)
                assert numpy.allclose(values[4], 284526)
                assert numpy.allclose(values[5], 334370)
                assert numpy.allclose(values[6], 143325)
            elif i == 3:
                assert len(points) == 0  # Degenerate
            elif i == 4:
                assert len(points) == 0  # Degenerate
            elif i == 5:
                assert len(points) == 8
                assert numpy.allclose(values[0], 279103)
                assert numpy.allclose(values[1], 205762)
                assert numpy.allclose(values[2], 428705)
                assert numpy.allclose(values[3], 331093)
                assert numpy.allclose(values[4], 227514)
                assert numpy.allclose(values[5], 249308)
                assert numpy.allclose(values[6], 215739)
                assert numpy.allclose(values[7], 147447)
            elif i == 6:
                assert len(points) == 6
                assert numpy.allclose(values[0], 61836.4)
                assert numpy.allclose(values[1], 165723)
                assert numpy.allclose(values[2], 151307)
                assert numpy.allclose(values[3], 343787)
                assert numpy.allclose(values[4], 303627)
                assert numpy.allclose(values[5], 225232)

            # Generate layer objects
            values = [{'value': x} for x in C[i][1]]
            point_layer = Vector(data=values,
                                 geometry=points,
                                 projection=P.get_projection())

            if len(point_layer) > 0:
                # Geometry is only defined for layers that are not degenerate
                assert point_layer.is_point_data

            polygon_layer = Vector(geometry=[polygon],
                                   projection=P.get_projection())
            assert polygon_layer.is_polygon_data

            # Generate spatial data for visualisation with e.g. QGIS
            if False:
                point_layer.write_to_file('points_%i.shp' % i)
                polygon_layer.write_to_file('polygon_%i.shp' % i)
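clip_raster_by_polygons returns, for each polygon, the raster points (cell centres) falling inside it together with their values. A conceptual sketch of that pairing (not the project's implementation, which works directly on the Raster and Vector layers) could be:

def clip_points_by_polygon_list(points, values, polygons):
    """For each polygon, collect the (points, values) that fall inside it.

    points and values are assumed to come from R.to_vector_points(),
    i.e. an Nx2 numpy array of cell centres and the matching N values.
    """
    clipped = []
    for polygon in polygons:
        inside = [j for j, point in enumerate(points)
                  if is_inside_polygon(point, polygon)]
        clipped.append((points[inside, :], [values[j] for j in inside]))
    return clipped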
Example #16
def process_flood_event(netcdf_file=None, hours=24):
    """A function to process this_netcdf_file to a forecast file.

    :param netcdf_file: The netcdf file. If it's None then download it.

    :param hours: Positive integer determining how many bands to use.
    :type hours: int
    """
    print 'Start flood forecasting'

    if netcdf_file is None:
        # retrieve data from the web
        netcdf_file = download_file_url(netcdf_url, forecast_directory)
    else:
        netcdf_file = download_file_url(netcdf_url,
                                        name=netcdf_file,
                                        download_directory=forecast_directory)
    print 'Do flood forecasting for %s ...' % netcdf_file

    ## check if a forecasting file has been created or not
    # is_exist, polyforecast_filepath = get_result_file_name(this_netcdf_file,
    # hours)
    #
    #if is_exist:
    #    print 'Current flood forecasting has been already created.'
    #    print 'You can look it at %s' % polyforecast_filepath
    #    return

    # convert to tif
    # tif_file = polyforecast_filepath.replace('_regions.shp', '.tif')
    tif_filename = convert_netcdf2tif(netcdf_file,
                                      hours,
                                      verbose=False,
                                      output_dir=flood_directory)
    print 'tif_file', tif_filename
    tif_file = read_layer(tif_filename)

    # check if there is another file with the same name
    # if so, do not do the forecasting
    polyforecast_filepath = tif_filename.replace('.tif', '_regions.shp')
    zip_filename = polyforecast_filepath.replace('.shp', '.zip')
    if os.path.isfile(zip_filename):
        print('File %s exists, so we do not do the forecasting' %
              zip_filename)
    else:
        polygons = read_layer(polygons_path)
        result = tag_polygons_by_grid(polygons,
                                      tif_file,
                                      threshold=0.3,
                                      tag='affected')

        new_geom = result.get_geometry()
        new_data = result.get_data()

        date = os.path.split(netcdf_file)[-1].split('_')[0]

        v = Vector(geometry=new_geom,
                   data=new_data,
                   projection=result.projection,
                   keywords={
                       'category':
                       'hazard',
                       'subcategory':
                       'flood',
                       'title': ('%d hour flood forecast regions '
                                 'in Jakarta at %s' % (hours, date))
                   })

        print 'polyforecast_filepath', polyforecast_filepath
        v.write_to_file(polyforecast_filepath)
        print 'Wrote tagged polygons to %s' % polyforecast_filepath

    # Zip all files
    if os.path.isfile(zip_filename):
        print 'Has been zipped to %s' % zip_filename
    else:
        zip_shp(polyforecast_filepath,
                extra_ext=['.keywords'],
                remove_file=True)
        print 'Zipped to %s' % zip_filename
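A hypothetical invocation of the function above; it assumes the surrounding script has already defined the module-level names it relies on (netcdf_url, forecast_directory, flood_directory and polygons_path):

if __name__ == '__main__':
    # Forecast regions for the next 24 hours from the latest published file
    process_flood_event(netcdf_file=None, hours=24)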
Example #17
    def test_clip_raster_by_polygons(self):
        """Raster grids can be clipped by polygon layers

        # See qgis project in test data: raster_point_and_clipping_test.qgs
        """

        # Name input files
        poly = join(TESTDATA, 'kabupaten_jakarta_singlepart.shp')
        grid = join(TESTDATA, 'population_5x5_jakarta.asc')

        # Get layers using API
        P = read_layer(poly)
        R = read_layer(grid)

        M = len(P)
        N = len(R)
        assert N == 56

        # Clip
        C = clip_raster_by_polygons(R, P)
        assert len(C) == M

        # Check points inside polygon
        tot = 0
        for c in C:
            tot += len(c)
        assert tot == 14

        # Check that points are inside the right polygon
        for i, polygon in enumerate(P.get_geometry()):

            points = C[i][0]
            values = C[i][1]

            # Sanity first
            for point in points:
                assert is_inside_polygon(point, polygon)

            # Specific tests against raster pixel values inside polygons
            # The values are read from qgis
            if i == 0:
                assert len(points) == 6
                assert numpy.allclose(values[0], 200951)
                assert numpy.allclose(values[1], 283237)
                assert numpy.allclose(values[2], 278385)
                assert numpy.allclose(values[3], 516061)
                assert numpy.allclose(values[4], 207414)
                assert numpy.allclose(values[5], 344466)

            elif i == 1:
                assert len(points) == 2
                msg = ('Got wrong coordinates %s, expected %s'
                       % (str(points[0, :]), str([106.8125, -6.1875])))
                assert numpy.allclose(points[0, :], [106.8125, -6.1875]), msg
                assert numpy.allclose(points[1, :], [106.8541667, -6.1875])
                assert numpy.allclose(values[0], 331942)
                assert numpy.allclose(values[1], 496446)
            elif i == 2:
                assert len(points) == 7
                assert numpy.allclose(values[0], 268579)
                assert numpy.allclose(values[1], 155795)
                assert numpy.allclose(values[2], 403674)
                assert numpy.allclose(values[3], 259280)
                assert numpy.allclose(values[4], 284526)
                assert numpy.allclose(values[5], 334370)
                assert numpy.allclose(values[6], 143325)
            elif i == 3:
                assert len(points) == 0  # Degenerate
            elif i == 4:
                assert len(points) == 0  # Degenerate
            elif i == 5:
                assert len(points) == 8
                assert numpy.allclose(values[0], 279103)
                assert numpy.allclose(values[1], 205762)
                assert numpy.allclose(values[2], 428705)
                assert numpy.allclose(values[3], 331093)
                assert numpy.allclose(values[4], 227514)
                assert numpy.allclose(values[5], 249308)
                assert numpy.allclose(values[6], 215739)
                assert numpy.allclose(values[7], 147447)
            elif i == 6:
                assert len(points) == 6
                assert numpy.allclose(values[0], 61836.4)
                assert numpy.allclose(values[1], 165723)
                assert numpy.allclose(values[2], 151307)
                assert numpy.allclose(values[3], 343787)
                assert numpy.allclose(values[4], 303627)
                assert numpy.allclose(values[5], 225232)

            # Generate layer objects
            values = [{'value': x} for x in C[i][1]]
            point_layer = Vector(data=values, geometry=points,
                                 projection=P.get_projection())

            if len(point_layer) > 0:
                # Geometry is only defined for layers that are not degenerate
                assert point_layer.is_point_data

            polygon_layer = Vector(geometry=[polygon],
                                   projection=P.get_projection())
            assert polygon_layer.is_polygon_data

            # Generate spatial data for visualisation with e.g. QGIS
            if False:
                point_layer.write_to_file('points_%i.shp' % i)
                polygon_layer.write_to_file('polygon_%i.shp' % i)