Example #1
0
def convert_line_to_points(V, delta):
    """Convert line vector data to point vector data

    Input
        V: Vector layer with line data
        delta: Incremental step to find the points
    Output
        Vector layer with point data and the same attributes as V
    """

    msg = 'Input data %s must be line vector data' % V
    verify(V.is_line_data, msg)

    geometry = V.get_geometry()
    data = V.get_data()
    N = len(V)

    # Calculate points along each line
    points = []
    new_data = []
    for i in range(N):
        c = points_along_line(geometry[i], delta)
        # We need to create a data entry for each point.
        new_data.extend([data[i] for _ in c])
        points.extend(c)

    # Create new point vector layer with same attributes and return
    V = Vector(data=new_data,
               projection=V.get_projection(),
               geometry=points,
               name='%s_point_data' % V.get_name(),
               keywords=V.get_keywords())
    return V
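A minimal usage sketch, assuming Vector and convert_line_to_points are importable from the surrounding module; the file name and delta value are hypothetical:

    roads = Vector(data='roads.shp')                  # hypothetical line layer
    road_points = convert_line_to_points(roads, delta=0.001)
    assert road_points.is_point_data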
Example #2
0
    def interpolate(self, X, name=None):
        """Interpolate values of this raster layer to other layer

        Input
            X: Layer object defining target
            name: Optional name of interpolated layer.
                  If name is None, the name of self is used.

        Output
            Y: Layer object with values of this raster layer interpolated to
               geometry of input layer X

        Note: If target geometry is polygon, data will be interpolated to
        its centroids and the output is a point data set.
        """

        if X.is_raster:
            if self.get_geotransform() != X.get_geotransform():
                # Need interpolation between grids
                msg = 'Intergrid interpolation not yet implemented'
                raise Exception(msg)
            else:
                # Rasters are aligned, no need to interpolate
                return self
        else:
            # Interpolate this raster layer to geometry of X
            msg = ('Name must be either a string or None. I got %s' %
                   (str(type(name)))[1:-1])
            verify(name is None or isinstance(name, basestring), msg)

            return interpolate_raster_vector(self, X, name)
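A minimal usage sketch, assuming the Raster and Vector classes from this module and hypothetical input files:

    hazard = Raster(data='hazard.tif')           # hypothetical raster grid
    buildings = Vector(data='buildings.shp')     # hypothetical point layer
    interpolated = hazard.interpolate(buildings, name='hazard_level')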
Example #3
0
def convert_line_to_points(V, delta):
    """Convert line vector data to point vector data

    Input
        V: Vector layer with line data
        delta: Incremental step to find the points
    Output
        Vector layer with point data and the same attributes as V
    """

    msg = 'Input data %s must be line vector data' % V
    verify(V.is_line_data, msg)

    geometry = V.get_geometry()
    data = V.get_data()
    N = len(V)

    # Calculate points along each line
    points = []
    new_data = []
    for i in range(N):
        c = points_along_line(geometry[i], delta)
        # We need to create a data entry for each point.
        new_data.extend([data[i] for _ in c])
        points.extend(c)

    # Create new point vector layer with same attributes and return
    V = Vector(data=new_data,
               projection=V.get_projection(),
               geometry=points,
               name='%s_point_data' % V.get_name(),
               keywords=V.get_keywords())
    return V
Example #4
0
    def interpolate(self, X, name=None):
        """Interpolate values of this raster layer to other layer

        Input
            X: Layer object defining target
            name: Optional name of interpolated layer.
                  If name is None, the name of self is used.

        Output
            Y: Layer object with values of this raster layer interpolated to
               geometry of input layer X

        Note: If target geometry is polygon, data will be interpolated to
        its centroids and the output is a point data set.
        """

        if X.is_raster:
            if self.get_geotransform() != X.get_geotransform():
                # Need interpolation between grids
                msg = 'Intergrid interpolation not yet implemented'
                raise Exception(msg)
            else:
                # Rasters are aligned, no need to interpolate
                return self
        else:
            # Interpolate this raster layer to geometry of X
            msg = ('Name must be either a string or None. I got %s'
                   % (str(type(name)))[1:-1])
            verify(name is None or isinstance(name, basestring), msg)

            return interpolate_raster_vector(self, X, name)
Example #5
0
def convert_polygons_to_centroids(V):
    """Convert polygon vector data to point vector data

    Input
        V: Vector layer with polygon data

    Output
        Vector layer with point data and the same attributes as V
    """

    msg = 'Input data %s must be polygon vector data' % V
    verify(V.is_polygon_data, msg)

    geometry = V.get_geometry()
    N = len(V)

    # Calculate centroid for each polygon
    centroids = []
    for i in range(N):
        c = calculate_polygon_centroid(geometry[i])
        centroids.append(c)

    # Create new point vector layer with same attributes and return
    V = Vector(data=V.get_data(),
               projection=V.get_projection(),
               geometry=centroids,
               name='%s_centroid_data' % V.get_name(),
               keywords=V.get_keywords())
    return V
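A minimal usage sketch, assuming a hypothetical polygon shapefile and the classes defined in this module:

    districts = Vector(data='districts.shp')     # hypothetical polygon layer
    centroids = convert_polygons_to_centroids(districts)
    assert len(centroids) == len(districts)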
Example #6
0
def interpolate_raster_vector(R, V, name=None):
    """Interpolate from raster layer to vector data

    Input
        R: Raster data set (grid)
        V: Vector data set (points or polygons)
        name: Name for new attribute.
              If None (default) the name of R is used

    Output
        I: Vector data set; points located as V with values interpolated from R

    Note: If target geometry is polygon, data will be interpolated to
    its centroids and the output is a point data set.
    """

    # Input checks
    verify(R.is_raster)
    verify(V.is_vector)

    if V.is_polygon_data:
        # Use centroids, in case of polygons
        P = convert_polygons_to_centroids(V)
    else:
        P = V

    return interpolate_raster_vector_points(R, P, name=name)
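A minimal usage sketch with hypothetical file names; polygon exposure data is converted to centroids internally before interpolation:

    R = Raster(data='flood_depth.tif')           # hypothetical raster grid
    V = Vector(data='buildings.shp')             # hypothetical polygon layer
    I = interpolate_raster_vector(R, V, name='depth')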
Example #7
0
def convert_polygons_to_centroids(V):
    """Convert polygon vector data to point vector data

    Input
        V: Vector layer with polygon data

    Output
        Vector layer with point data and the same attributes as V
    """

    msg = 'Input data %s must be polygon vector data' % V
    verify(V.is_polygon_data, msg)

    geometry = V.get_geometry()
    N = len(V)

    # Calculate centroid for each polygon
    centroids = []
    for i in range(N):
        c = calculate_polygon_centroid(geometry[i])
        centroids.append(c)

    # Create new point vector layer with same attributes and return
    V = Vector(data=V.get_data(),
               projection=V.get_projection(),
               geometry=centroids,
               name='%s_centroid_data' % V.get_name(),
               keywords=V.get_keywords())
    return V
Example #8
0
    def get_geometry(self):
        """Return longitudes and latitudes (the axes) for grid.

        Return two vectors (longitudes and latitudes) corresponding to
        grid. The values are offset by half a pixel size to correspond to
        pixel registration.

        E.g. if the grid origin (top left corner) is (100, 10), the
        resolution is 1 degree in each direction and the grid has
        10 columns and 10 rows, then the vectors will take the form

        longitudes = [100.5, 101.5, ..., 109.5]
        latitudes = [0.5, 1.5, ..., 9.5]
        """

        # Get parameters for axes
        g = self.get_geotransform()

        lon_ul = g[0]  # Longitude of upper left corner
        lat_ul = g[3]  # Latitude of upper left corner
        dx = g[1]      # Longitudinal resolution
        dy = - g[5]    # Latitudinal resolution (always(?) negative)
        nx = self.columns
        ny = self.rows

        verify(dx > 0)
        verify(dy > 0)

        # Coordinates of lower left corner
        lon_ll = lon_ul
        lat_ll = lat_ul - ny * dy

        # Coordinates of upper right corner
        lon_ur = lon_ul + nx * dx

        # Define pixel centers along each direction
        dy2 = dy / 2
        dx2 = dx / 2

        # Define longitudes and latitudes along each axis
        x = numpy.linspace(lon_ll + dx2,
                           lon_ur - dx2, nx)
        y = numpy.linspace(lat_ll + dy2,
                           lat_ul - dy2, ny)

        # Return
        return x, y
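A minimal usage sketch tied to the docstring example above; layer is a hypothetical Raster instance whose geotransform is (100, 1, 0, 10, 0, -1) with 10 columns and 10 rows:

    x, y = layer.get_geometry()
    # x == [100.5, 101.5, ..., 109.5], y == [0.5, 1.5, ..., 9.5]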
Example #9
0
    def get_geometry(self):
        """Return longitudes and latitudes (the axes) for grid.

        Return two vectors (longitudes and latitudes) corresponding to
        grid. The values are offset by half a pixel size to correspond to
        pixel registration.

        E.g. if the grid origin (top left corner) is (100, 10), the
        resolution is 1 degree in each direction and the grid has
        10 columns and 10 rows, then the vectors will take the form

        longitudes = [100.5, 101.5, ..., 109.5]
        latitudes = [0.5, 1.5, ..., 9.5]
        """

        # Get parameters for axes
        g = self.get_geotransform()

        lon_ul = g[0]  # Longitude of upper left corner
        lat_ul = g[3]  # Latitude of upper left corner
        dx = g[1]  # Longitudinal resolution
        dy = -g[5]  # Latitudinal resolution (always(?) negative)
        nx = self.columns
        ny = self.rows

        verify(dx > 0)
        verify(dy > 0)

        # Coordinates of lower left corner
        lon_ll = lon_ul
        lat_ll = lat_ul - ny * dy

        # Coordinates of upper right corner
        lon_ur = lon_ul + nx * dx

        # Define pixel centers along each direction
        dy2 = dy / 2
        dx2 = dx / 2

        # Define longitudes and latitudes along each axis
        x = numpy.linspace(lon_ll + dx2, lon_ur - dx2, nx)
        y = numpy.linspace(lat_ll + dy2, lat_ul - dy2, ny)

        # Return
        return x, y
Example #10
0
    def write_to_file(self, filename):
        """Save raster data to file

        Input
            filename: filename with extension .tif or .asc
        """

        # Check file format
        basename, extension = os.path.splitext(filename)

        msg = ('Invalid file type for file %s. Only extensions '
               'tif and asc allowed.' % filename)
        verify(extension in ['.tif', '.asc'], msg)
        format = DRIVER_MAP[extension]

        # Get raster data
        A = self.get_data()

        # Get Dimensions. Note numpy and Gdal swap order
        N, M = A.shape

        # Create empty file.
        # FIXME (Ole): It appears that this is created as single
        #              precision even though Float64 is specified
        #              - see issue #17
        driver = gdal.GetDriverByName(format)
        fid = driver.Create(filename, M, N, 1, gdal.GDT_Float64)
        if fid is None:
            msg = ('Gdal could not create filename %s using '
                   'format %s' % (filename, format))
            raise Exception(msg)

        # Write metadata
        fid.SetProjection(str(self.projection))
        fid.SetGeoTransform(self.geotransform)

        # Write data
        fid.GetRasterBand(1).WriteArray(A)

        # Write keywords if any
        write_keywords(self.keywords, basename + '.keywords')
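A minimal usage sketch with hypothetical file names; the keywords file is written next to the raster:

    layer = Raster(data='input.tif')             # hypothetical source raster
    layer.write_to_file('output.tif')            # also writes output.keywords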
Example #11
0
    def write_to_file(self, filename):
        """Save raster data to file

        Input
            filename: filename with extension .tif or .asc
        """

        # Check file format
        basename, extension = os.path.splitext(filename)

        msg = ('Invalid file type for file %s. Only extensions '
               'tif and asc allowed.' % filename)
        verify(extension in ['.tif', '.asc'], msg)
        format = DRIVER_MAP[extension]

        # Get raster data
        A = self.get_data()

        # Get Dimensions. Note numpy and Gdal swap order
        N, M = A.shape

        # Create empty file.
        # FIXME (Ole): It appears that this is created as single
        #              precision even though Float64 is specified
        #              - see issue #17
        driver = gdal.GetDriverByName(format)
        fid = driver.Create(filename, M, N, 1, gdal.GDT_Float64)
        if fid is None:
            msg = ('Gdal could not create filename %s using '
                   'format %s' % (filename, format))
            raise Exception(msg)

        # Write metadata
        fid.SetProjection(str(self.projection))
        fid.SetGeoTransform(self.geotransform)

        # Write data
        fid.GetRasterBand(1).WriteArray(A)

        # Write keywords if any
        write_keywords(self.keywords, basename + '.keywords')
Example #12
0
    def get_bins(self, N=10, quantiles=False):
        """Get N values between the min and the max occurred in this dataset.

        Return sorted list of length N+1 where the first element is min and
        the last is max. Intermediate values depend on the keyword quantiles:
        If quantiles is True, they represent boundaries between quantiles.
        If quantiles is False, they represent equidistant interval boundaries.
        """

        min, max = self.get_extrema()

        levels = []
        if quantiles is False:
            # Linear intervals
            d = (max - min) / N

            for i in range(N):
                levels.append(min + i * d)
        else:
            # Quantiles
            # FIXME (Ole): Not 100% sure about this algorithm,
            # but it is close enough

            A = self.get_data(nan=True).flat[:]

            mask = numpy.logical_not(numpy.isnan(A))  # Omit NaN's
            A = A.compress(mask)

            A.sort()

            verify(len(A) == A.shape[0])

            d = float(len(A) + 0.5) / N
            for i in range(N):
                levels.append(A[int(i * d)])

        levels.append(max)

        return levels
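A minimal usage sketch; layer is a hypothetical Raster instance:

    equal_bins = layer.get_bins(N=10, quantiles=False)    # 11 equidistant boundaries
    quantile_bins = layer.get_bins(N=10, quantiles=True)  # 11 quantile boundaries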
Example #13
0
    def get_bins(self, N=10, quantiles=False):
        """Get N values between the min and the max occurred in this dataset.

        Return sorted list of length N+1 where the first element is min and
        the last is max. Intermediate values depend on the keyword quantiles:
        If quantiles is True, they represent boundaries between quantiles.
        If quantiles is False, they represent equidistant interval boundaries.
        """

        min, max = self.get_extrema()

        levels = []
        if quantiles is False:
            # Linear intervals
            d = (max - min) / N

            for i in range(N):
                levels.append(min + i * d)
        else:
            # Quantiles
            # FIXME (Ole): Not 100% sure about this algorithm,
            # but it is close enough

            A = self.get_data(nan=True).flat[:]

            mask = numpy.logical_not(numpy.isnan(A))  # Omit NaN's
            A = A.compress(mask)

            A.sort()

            verify(len(A) == A.shape[0])

            d = float(len(A) + 0.5) / N
            for i in range(N):
                levels.append(A[int(i * d)])

        levels.append(max)

        return levels
Example #14
0
    def get_data(self, attribute=None, index=None):
        """Get vector attributes

        Data is returned as a list where each entry is a dictionary of
        attributes for one feature. Entries in get_geometry() and
        get_data() are related as 1-to-1

        If optional argument attribute is specified and a valid name,
        then the list of values for that attribute is returned.

        If optional argument index is specified, only the value at that
        index will be returned. Any value of index is ignored if attribute
        is None.
        """

        if hasattr(self, 'data'):
            if attribute is None:
                return self.data
            else:
                msg = ('Specified attribute %s does not exist in '
                       'vector layer %s. Valid names are %s'
                       '' % (attribute, self, self.data[0].keys()))
                verify(attribute in self.data[0], msg)

                if index is None:
                    # Return all values for specified attribute
                    return [x[attribute] for x in self.data]
                else:
                    # Return value for specified attribute and index
                    msg = ('Specified index must be either None or '
                           'an integer. I got %s' % index)
                    verify(type(index) == type(0), msg)

                    msg = ('Specified index must lie within the bounds '
                           'of vector layer %s which is [%i, %i]'
                           '' % (self, 0, len(self) - 1))
                    verify(0 <= index < len(self), msg)

                    return self.data[index][attribute]
        else:
            msg = 'Vector data instance does not have any attributes'
            raise Exception(msg)
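A minimal usage sketch; layer is a hypothetical Vector instance and 'DAMAGE' a hypothetical attribute name:

    features = layer.get_data()                           # list of attribute dictionaries
    damage = layer.get_data(attribute='DAMAGE')           # values for one attribute
    first = layer.get_data(attribute='DAMAGE', index=0)   # single value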
Example #15
0
    def get_data(self, attribute=None, index=None):
        """Get vector attributes

        Data is returned as a list where each entry is a dictionary of
        attributes for one feature. Entries in get_geometry() and
        get_data() are related as 1-to-1

        If optional argument attribute is specified and a valid name,
        then the list of values for that attribute is returned.

        If optional argument index is specified, only the value at that
        index will be returned. Any value of index is ignored if attribute
        is None.
        """

        if hasattr(self, 'data'):
            if attribute is None:
                return self.data
            else:
                msg = ('Specified attribute %s does not exist in '
                       'vector layer %s. Valid names are %s'
                       '' % (attribute, self, self.data[0].keys()))
                verify(attribute in self.data[0], msg)

                if index is None:
                    # Return all values for specified attribute
                    return [x[attribute] for x in self.data]
                else:
                    # Return value for specified attribute and index
                    msg = ('Specified index must be either None or '
                           'an integer. I got %s' % index)
                    verify(type(index) == type(0), msg)

                    msg = ('Specified index must lie within the bounds '
                           'of vector layer %s which is [%i, %i]'
                           '' % (self, 0, len(self) - 1))
                    verify(0 <= index < len(self), msg)

                    return self.data[index][attribute]
        else:
            msg = 'Vector data instance does not have any attributes'
            raise Exception(msg)
Example #16
0
    def get_topN(self, attribute, N=10):
        """Get top N features

        Input
            attribute: The name of attribute where values are sought
            N: How many

        Output
            layer: New vector layer with selected features
        """

        # FIXME (Ole): Maybe generalise this to arbitrary expressions

        # Input checks
        msg = ('Specified attribute must be a string. '
               'I got %s' % (type(attribute)))
        verify(isinstance(attribute, basestring), msg)

        msg = 'Specified attribute was empty'
        verify(attribute != '', msg)

        msg = 'N must be a positive number. I got %i' % N
        verify(N > 0, msg)

        # Create list of values for specified attribute
        values = self.get_data(attribute)

        # Sort and select using Schwarzian transform
        A = zip(values, self.data, self.geometry)
        A.sort()

        # Pick top N and unpack
        _, data, geometry = zip(*A[-N:])

        # Create new Vector instance and return
        return Vector(data=data,
                      projection=self.get_projection(),
                      geometry=geometry)
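A minimal usage sketch; layer is a hypothetical Vector instance and 'POPULATION' a hypothetical attribute name:

    top5 = layer.get_topN('POPULATION', N=5)
    values = top5.get_data('POPULATION')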
Example #17
0
    def get_topN(self, attribute, N=10):
        """Get top N features

        Input
            attribute: The name of attribute where values are sought
            N: How many

        Output
            layer: New vector layer with selected features
        """

        # FIXME (Ole): Maybe generalise this to arbitrary expressions

        # Input checks
        msg = ('Specified attribute must be a string. '
               'I got %s' % (type(attribute)))
        verify(isinstance(attribute, basestring), msg)

        msg = 'Specified attribute was empty'
        verify(attribute != '', msg)

        msg = 'N must be a positive number. I got %i' % N
        verify(N > 0, msg)

        # Create list of values for specified attribute
        values = self.get_data(attribute)

        # Sort and select using Schwarzian transform
        A = zip(values, self.data, self.geometry)
        A.sort()

        # Pick top N and unpack
        _, data, geometry = zip(*A[-N:])

        # Create new Vector instance and return
        return Vector(data=data,
                      projection=self.get_projection(),
                      geometry=geometry)
Example #18
0
    def __init__(self, data=None, projection=None, geometry=None,
                 geometry_type=None,
                 name='', keywords=None, style_info=None):
        """Initialise object with either geometry or filename

        Input
            data: Can be either
                * a filename of a vector file format known to GDAL
                * List of dictionaries of fields associated with
                  point coordinates
                * None
            projection: Geospatial reference in WKT format.
                        Only used if geometry is provided as a numeric array.
            geometry: A list of either point coordinates or polygons/lines
                      (see note below)
            geometry_type: Desired interpretation of geometry.
                           Valid options are 'point', 'line', 'polygon' or
                           the ogr types: 1, 2, 3
                           If None, a geometry_type will be inferred
            name: Optional name for layer.
                  Only used if geometry is provided as a numeric array.
            keywords: Optional dictionary with keywords that describe the
                      layer. When the layer is stored, these keywords will
                      be written into an associated file with extension
                      .keywords.

                      Keywords can for example be used to display text
                      about the layer in a web application.

        Notes

        If data is a filename, all other arguments are ignored
        as they will be inferred from the file.

        The geometry type will be inferred from the dimensions of geometry.
        If each entry is one set of coordinates the type will be ogr.wkbPoint,
        if it is an array of coordinates the type will be ogr.wkbPolygon.

        Each polygon or line feature takes the form of an Nx2 array of
        vertices, where consecutive vertices are joined by line segments.
        """

        if data is None and projection is None and geometry is None:
            # Instantiate empty object
            self.name = name
            self.projection = None
            self.geometry = None
            self.geometry_type = None
            self.filename = None
            self.data = None
            self.extent = None
            self.keywords = {}
            self.style_info = {}
            return

        if isinstance(data, basestring):
            self.read_from_file(data)
        else:
            # Assume that geometry and data are provided as sequences
            # passed directly to the Vector constructor,
            # with extra keyword arguments supplying metadata

            self.name = name
            self.filename = None

            if keywords is None:
                self.keywords = {}
            else:
                msg = ('Specified keywords must be either None or a '
                       'dictionary. I got %s' % keywords)
                verify(isinstance(keywords, dict), msg)
                self.keywords = keywords

            if style_info is None:
                self.style_info = {}
            else:
                msg = ('Specified style_info must be either None or a '
                       'dictionary. I got %s' % style_info)
                verify(isinstance(style_info, dict), msg)
                self.style_info = style_info

            msg = 'Geometry must be specified'
            verify(geometry is not None, msg)

            msg = 'Geometry must be a sequence'
            verify(is_sequence(geometry), msg)
            self.geometry = geometry

            self.geometry_type = get_geometry_type(geometry, geometry_type)

            #msg = 'Projection must be specified'
            #verify(projection is not None, msg)
            self.projection = Projection(projection)

            if data is None:
                # Generate default attribute as OGR will do that anyway
                # when writing
                data = []
                for i in range(len(geometry)):
                    data.append({'ID': i})

            # Check data
            self.data = data
            if data is not None:
                msg = 'Data must be a sequence'
                verify(is_sequence(data), msg)

                msg = ('The number of entries in geometry and data '
                       'must be the same')
                verify(len(geometry) == len(data), msg)
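A minimal sketch of constructing a point layer directly from sequences; the coordinates, attributes and WKT projection string are hypothetical placeholders:

    wgs84_wkt = 'GEOGCS["WGS 84", ...]'          # hypothetical placeholder WKT
    geometry = [(106.8, -6.2), (106.9, -6.3)]
    data = [{'name': 'A'}, {'name': 'B'}]
    V = Vector(data=data, projection=wgs84_wkt, geometry=geometry,
               name='sample_points')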
Example #19
0
    def get_data(self, nan=True, scaling=None):
        """Get raster data as numeric array

        Input
            nan: Optional flag controlling handling of missing values.
                 If nan is True (default), nodata values will be replaced
                 with numpy.nan
                 If keyword nan has a numeric value, nodata values will
                 be replaced by that value. E.g. to set missing values to 0,
                 do get_data(nan=0.0)
            scaling: Optional flag controlling if data is to be scaled
                     if it has been resampled. Admissible values are
                     False: data is retrieved without modification.
                     True: Data is rescaled based on the squared ratio between
                           its current and native resolution. This is typically
                           required if raster data represents a density
                           such as population per km^2
                     None: The behaviour will depend on the keyword
                           "datatype" associated with the layer. If
                           it is "density", scaling will be applied,
                           otherwise not. This is the default.
                     scalar value: If scaling takes a numerical scalar value,
                                   that will be used to scale the data

        """

        if hasattr(self, 'data'):
            A = self.data
            verify(A.shape[0] == self.rows and A.shape[1] == self.columns)
        else:
            # Read from raster file
            A = self.band.ReadAsArray()

            # Convert to double precision (issue #75)
            A = numpy.array(A, dtype=numpy.float64)

            # Self check
            M, N = A.shape
            msg = ('Dimensions of raster array do not match those of '
                   'raster file %s' % self.filename)
            verify(M == self.rows, msg)
            verify(N == self.columns, msg)

        # Handle no data value
        if nan is False:
            pass
        else:
            if nan is True:
                NAN = numpy.nan
            else:
                NAN = nan

            # Replace NODATA_VALUE with NaN
            nodata = self.get_nodata_value()

            NaN = numpy.ones(A.shape, A.dtype) * NAN
            A = numpy.where(A == nodata, NaN, A)

        # Take care of possible scaling
        if scaling is None:
            # Redefine scaling from density keyword if possible
            kw = self.get_keywords()
            if 'datatype' in kw and kw['datatype'].lower() == 'density':
                scaling = True
            else:
                scaling = False

        if scaling is False:
            # No change
            sigma = 1
        elif scaling is True:
            # Calculate scaling based on resolution change

            actual_res = self.get_resolution(isotropic=True)
            native_res = self.get_resolution(isotropic=True, native=True)
            #print
            #print 'Actual res', actual_res
            #print 'Native res', native_res
            sigma = (actual_res / native_res) ** 2
            #print 'Scaling', sigma
        else:
            # See if scaling can work as a scalar value
            try:
                sigma = float(scaling)
            except Exception, e:
                msg = ('Keyword scaling "%s" could not be converted to a '
                       'number. It must be either True, False, None or a '
                       'number: %s' % (scaling, str(e)))
                raise Exception(msg)
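A minimal usage sketch; layer is a hypothetical Raster instance:

    A = layer.get_data()               # nodata values replaced with numpy.nan
    A0 = layer.get_data(nan=0.0)       # nodata values replaced with 0.0
    D = layer.get_data(scaling=True)   # rescale resampled density data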
Example #20
0
    def write_to_file(self, filename):
        """Save vector data to file

        Input
            filename: filename with extension .shp or .gml

        Note, if attribute names are longer than 10 characters they will be
        truncated. This is due to limitations in the shp file driver and has
        to be done here since gdal v1.7 onwards has changed its handling of
        this issue: http://www.gdal.org/ogr/drv_shapefile.html
        """

        # Check file format
        basename, extension = os.path.splitext(filename)

        msg = ('Invalid file type for file %s. Only extensions '
               'shp or gml allowed.' % filename)
        verify(extension == '.shp' or extension == '.gml', msg)
        driver = DRIVER_MAP[extension]

        # FIXME (Ole): Temporary flagging of GML issue (ticket #18)
        if extension == '.gml':
            msg = ('OGR GML driver does not store geospatial reference. '
                   'This format is disabled for the time being. See '
                   'https://github.com/AIFDR/riab/issues/18')
            raise Exception(msg)

        # Derive layername from filename (excluding preceding dirs)
        layername = os.path.split(basename)[-1]

        # Get vector data
        geometry = self.get_geometry()
        data = self.get_data()

        N = len(geometry)

        # Clear any previous file of this name (ogr does not overwrite)
        try:
            os.remove(filename)
        except:
            pass

        # Create new file with one layer
        drv = ogr.GetDriverByName(driver)
        if drv is None:
            msg = 'OGR driver %s not available' % driver
            raise Exception(msg)

        ds = drv.CreateDataSource(filename)
        if ds is None:
            msg = 'Creation of output file %s failed' % filename
            raise Exception(msg)

        lyr = ds.CreateLayer(layername,
                             self.projection.spatial_reference,
                             self.geometry_type)
        if lyr is None:
            msg = 'Could not create layer %s' % layername
            raise Exception(msg)

        # Define attributes if any
        store_attributes = False
        if data is not None:
            if len(data) > 0:
                try:
                    fields = data[0].keys()
                except:
                    msg = ('Input parameter "attributes" was specified '
                           'but it does not contain dictionaries with '
                           'field information as expected. The first'
                           'element is %s' % data[0])
                    raise Exception(msg)
                else:
                    # Establish OGR types for each element
                    ogrtypes = {}
                    for name in fields:
                        att = data[0][name]
                        py_type = type(att)
                        msg = ('Unknown type for storing vector '
                               'data: %s, %s' % (name, str(py_type)[1:-1]))
                        verify(py_type in TYPE_MAP, msg)
                        ogrtypes[name] = TYPE_MAP[py_type]

            else:
                msg = ('Input parameter "data" was specified '
                       'but appears to be empty')
                raise Exception(msg)

            # Create attribute fields in layer
            store_attributes = True
            for name in fields:
                fd = ogr.FieldDefn(name, ogrtypes[name])
                # FIXME (Ole): Trying to address issue #16
                #              But it doesn't work and
                #              somehow changes the values of MMI in test
                #width = max(128, len(name))
                #print name, width
                #fd.SetWidth(width)

                # Silent handling of warnings like
                # Warning 6: Normalized/laundered field name:
                #'CONTENTS_LOSS_AUD' to 'CONTENTS_L'
                gdal.PushErrorHandler('CPLQuietErrorHandler')
                if lyr.CreateField(fd) != 0:
                    msg = 'Could not create field %s' % name
                    raise Exception(msg)

                # Restore error handler
                gdal.PopErrorHandler()

        # Store geometry
        geom = ogr.Geometry(self.geometry_type)
        layer_def = lyr.GetLayerDefn()
        for i in range(N):
            # Create new feature instance
            feature = ogr.Feature(layer_def)

            # Store geometry and check
            if self.geometry_type == ogr.wkbPoint:
                x = float(geometry[i][0])
                y = float(geometry[i][1])
                geom.SetPoint_2D(0, x, y)
            elif self.geometry_type == ogr.wkbPolygon:
                wkt = array2wkt(geometry[i], geom_type='POLYGON')
                geom = ogr.CreateGeometryFromWkt(wkt)
            elif self.geometry_type == ogr.wkbLineString:
                wkt = array2wkt(geometry[i], geom_type='LINESTRING')
                geom = ogr.CreateGeometryFromWkt(wkt)
            else:
                msg = 'Geometry type %s not implemented' % self.geometry_type
                raise Exception(msg)

            feature.SetGeometry(geom)

            G = feature.GetGeometryRef()
            if G is None:
                msg = 'Could not create GeometryRef for file %s' % filename
                raise Exception(msg)

            # Store attributes
            if store_attributes:
                for j, name in enumerate(fields):
                    actual_field_name = layer_def.GetFieldDefn(j).GetNameRef()

                    val = data[i][name]

                    if type(val) == numpy.ndarray:
                        # A singleton of type <type 'numpy.ndarray'> works
                        # for gdal version 1.6 but fails for version 1.8
                        # in SetField with error: NotImplementedError:
                        # Wrong number of arguments for overloaded function
                        val = float(val)
                    elif val is None:
                        val = ''

                    feature.SetField(actual_field_name, val)

            # Save this feature
            if lyr.CreateFeature(feature) != 0:
                msg = 'Failed to create feature %i in file %s' % (i, filename)
                raise Exception(msg)

            feature.Destroy()

        # Write keywords if any
        write_keywords(self.keywords, basename + '.keywords')
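A minimal usage sketch with hypothetical file names; a matching .keywords file is written alongside the shapefile:

    layer = Vector(data='buildings.shp')         # hypothetical source layer
    layer.write_to_file('buildings_out.shp')     # also writes buildings_out.keywords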
Example #21
0
    def __init__(self, data=None, projection=None, geotransform=None,
                 name='', keywords=None, style_info=None):
        """Initialise object with either data or filename

        Input
            data: Can be either
                * a filename of a raster file format known to GDAL
                * an MxN array of raster data
                * None (FIXME (Ole): Remove this option)
            projection: Geospatial reference in WKT format.
                        Only used if data is provided as a numeric array.
            geotransform: GDAL geotransform (6-tuple).
                          (top left x, w-e pixel resolution, rotation,
                           top left y, rotation, n-s pixel resolution).
                          See e.g. http://www.gdal.org/gdal_tutorial.html
                          Only used if data is provided as a numeric array.
            name: Optional name for layer.
                  Only used if data is provided as a numeric array.
            keywords: Optional dictionary with keywords that describe the
                      layer. When the layer is stored, these keywords will
                      be written into an associated file with extension
                      .keywords.

                      Keywords can for example be used to display text
                      about the layer in a web application.

        Note that if data is a filename, all other arguments are ignored
        as they will be inferred from the file.
        """

        # Input checks
        if data is None:
            # Instantiate empty object
            self.name = name
            self.data = None
            self.projection = None
            self.coordinates = None
            self.filename = None
            self.keywords = {}
            return

        # Initialisation
        if isinstance(data, basestring):
            self.read_from_file(data)
        else:
            # Assume that data is provided as an array
            # with extra keyword arguments supplying metadata
            if keywords is None:
                self.keywords = {}
            else:
                msg = ('Specified keywords must be either None or a '
                       'dictionary. I got %s' % keywords)
                verify(isinstance(keywords, dict), msg)
                self.keywords = keywords

            if style_info is None:
                self.style_info = {}
            else:
                msg = ('Specified style_info must be either None or a '
                       'dictionary. I got %s' % style_info)
                verify(isinstance(style_info, dict), msg)
                self.style_info = style_info

            self.data = numpy.array(data, dtype='d', copy=False)

            self.filename = None
            self.name = name

            self.projection = Projection(projection)
            self.geotransform = geotransform

            # Use the converted array in case data was passed as a nested list
            self.rows = self.data.shape[0]
            self.columns = self.data.shape[1]

            self.number_of_bands = 1
Example #22
0
            raise Exception(msg)

        if native:
            keywords = self.get_keywords()
            if 'resolution' in keywords:

                resolution = keywords['resolution']
                try:
                    res = float(resolution)
                except:
                    # Assume resolution is a string of the form:
                    # (0.00045228819716044, 0.00045228819716044)

                    msg = ('Unknown format for resolution keyword: %s' %
                           resolution)
                    verify((resolution.startswith('(')
                            and resolution.endswith(')')), msg)

                    dx, dy = [float(s) for s in resolution[1:-1].split(',')]
                    if not isotropic:
                        res = (dx, dy)
                    else:
                        msg = ('Resolution of layer "%s" was not isotropic: '
                               '[dx, dy] == %s' % (self.get_name(), (dx, dy)))
                        verify(
                            numpy.allclose(dx, dy, rtol=1.0e-12, atol=1.0e-12),
                            msg)
                        res = dx
                else:
                    if not isotropic:
                        res = (res, res)
Example #23
0
    def write_to_file(self, filename):
        """Save vector data to file

        Input
            filename: filename with extension .shp or .gml

        Note, if attribute names are longer than 10 characters they will be
        truncated. This is due to limitations in the shp file driver and has
        to be done here since gdal v1.7 onwards has changed its handling of
        this issue: http://www.gdal.org/ogr/drv_shapefile.html
        """

        # Check file format
        basename, extension = os.path.splitext(filename)

        msg = ('Invalid file type for file %s. Only extensions '
               'shp or gml allowed.' % filename)
        verify(extension == '.shp' or extension == '.gml', msg)
        driver = DRIVER_MAP[extension]

        # FIXME (Ole): Temporary flagging of GML issue (ticket #18)
        if extension == '.gml':
            msg = ('OGR GML driver does not store geospatial reference. '
                   'This format is disabled for the time being. See '
                   'https://github.com/AIFDR/riab/issues/18')
            raise Exception(msg)

        # Derive layername from filename (excluding preceding dirs)
        layername = os.path.split(basename)[-1]

        # Get vector data
        geometry = self.get_geometry()
        data = self.get_data()

        N = len(geometry)

        # Clear any previous file of this name (ogr does not overwrite)
        try:
            os.remove(filename)
        except:
            pass

        # Create new file with one layer
        drv = ogr.GetDriverByName(driver)
        if drv is None:
            msg = 'OGR driver %s not available' % driver
            raise Exception(msg)

        ds = drv.CreateDataSource(filename)
        if ds is None:
            msg = 'Creation of output file %s failed' % filename
            raise Exception(msg)

        lyr = ds.CreateLayer(layername, self.projection.spatial_reference,
                             self.geometry_type)
        if lyr is None:
            msg = 'Could not create layer %s' % layername
            raise Exception(msg)

        # Define attributes if any
        store_attributes = False
        if data is not None:
            if len(data) > 0:
                try:
                    fields = data[0].keys()
                except:
                    msg = ('Input parameter "attributes" was specified '
                           'but it does not contain dictionaries with '
                           'field information as expected. The first'
                           'element is %s' % data[0])
                    raise Exception(msg)
                else:
                    # Establish OGR types for each element
                    ogrtypes = {}
                    for name in fields:
                        att = data[0][name]
                        py_type = type(att)
                        msg = ('Unknown type for storing vector '
                               'data: %s, %s' % (name, str(py_type)[1:-1]))
                        verify(py_type in TYPE_MAP, msg)
                        ogrtypes[name] = TYPE_MAP[py_type]

            else:
                msg = ('Input parameter "data" was specified '
                       'but appears to be empty')
                raise Exception(msg)

            # Create attribute fields in layer
            store_attributes = True
            for name in fields:
                fd = ogr.FieldDefn(name, ogrtypes[name])
                # FIXME (Ole): Trying to address issue #16
                #              But it doesn't work and
                #              somehow changes the values of MMI in test
                #width = max(128, len(name))
                #print name, width
                #fd.SetWidth(width)

                # Silent handling of warnings like
                # Warning 6: Normalized/laundered field name:
                #'CONTENTS_LOSS_AUD' to 'CONTENTS_L'
                gdal.PushErrorHandler('CPLQuietErrorHandler')
                if lyr.CreateField(fd) != 0:
                    msg = 'Could not create field %s' % name
                    raise Exception(msg)

                # Restore error handler
                gdal.PopErrorHandler()

        # Store geometry
        geom = ogr.Geometry(self.geometry_type)
        layer_def = lyr.GetLayerDefn()
        for i in range(N):
            # Create new feature instance
            feature = ogr.Feature(layer_def)

            # Store geometry and check
            if self.geometry_type == ogr.wkbPoint:
                x = float(geometry[i][0])
                y = float(geometry[i][1])
                geom.SetPoint_2D(0, x, y)
            elif self.geometry_type == ogr.wkbPolygon:
                wkt = array2wkt(geometry[i], geom_type='POLYGON')
                geom = ogr.CreateGeometryFromWkt(wkt)
            elif self.geometry_type == ogr.wkbLineString:
                wkt = array2wkt(geometry[i], geom_type='LINESTRING')
                geom = ogr.CreateGeometryFromWkt(wkt)
            else:
                msg = 'Geometry type %s not implemented' % self.geometry_type
                raise Exception(msg)

            feature.SetGeometry(geom)

            G = feature.GetGeometryRef()
            if G is None:
                msg = 'Could not create GeometryRef for file %s' % filename
                raise Exception(msg)

            # Store attributes
            if store_attributes:
                for j, name in enumerate(fields):
                    actual_field_name = layer_def.GetFieldDefn(j).GetNameRef()

                    val = data[i][name]

                    if type(val) == numpy.ndarray:
                        # A singleton of type <type 'numpy.ndarray'> works
                        # for gdal version 1.6 but fails for version 1.8
                        # in SetField with error: NotImplementedError:
                        # Wrong number of arguments for overloaded function
                        val = float(val)
                    elif val is None:
                        val = ''

                    feature.SetField(actual_field_name, val)

            # Save this feature
            if lyr.CreateFeature(feature) != 0:
                msg = 'Failed to create feature %i in file %s' % (i, filename)
                raise Exception(msg)

            feature.Destroy()

        # Write keywords if any
        write_keywords(self.keywords, basename + '.keywords')
Example #24
0
def calculate_impact(layers, impact_fcn, comment=''):
    """Calculate impact levels as a function of list of input layers

    Input
        layers: List of Raster and Vector layer objects to be used for analysis

        impact_fcn: Impact function class; it is instantiated and its
                    run(layers) method is called with the input layers
        comment: Optional string to be embedded as metadata in the output

    Output
        Layer object with the resulting impact data. The layer is also
        written to file (tif for raster, shp for vector), comment is
        embedded as metadata and the filename is generated from input
        data and date.

    Note
        The admissible file types are tif and asc/prj for raster and
        gml or shp for vector data

    Assumptions
        1. All layers are in WGS84 geographic coordinates
        2. Layers are equipped with metadata such as names and categories
    """

    # Input checks
    check_data_integrity(layers)

    # Get an instance of the passed impact_fcn
    impact_function = impact_fcn()

    # Pass input layers to plugin
    F = impact_function.run(layers)

    msg = 'Impact function %s returned None' % str(impact_function)
    verify(F is not None, msg)

    # Write result and return filename
    if F.is_raster:
        extension = '.tif'
        # use default style for raster
    else:
        extension = '.shp'
        # use default style for vector

    output_filename = unique_filename(suffix=extension)
    F.filename = output_filename
    F.write_to_file(output_filename)

    # Establish default name (layer1 X layer2 X impact_function)
    if not F.get_name():
        default_name = ''
        for layer in layers:
            default_name += layer.name + ' X '

        if hasattr(impact_function, 'plugin_name'):
            default_name += impact_function.plugin_name
        else:
            # Strip trailing ' X '
            default_name = default_name[:-3]

        F.set_name(default_name)

    # FIXME (Ole): If we need to save style as defined by the impact_function
    #              this is the place

    # Return layer object
    return F
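A minimal usage sketch with hypothetical input files and a hypothetical impact function class; calculate_impact instantiates the class and calls its run method on the layers:

    hazard = Raster(data='hazard.tif')           # hypothetical raster layer
    exposure = Vector(data='buildings.shp')      # hypothetical vector layer
    impact = calculate_impact([hazard, exposure], MyImpactFunction)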
Example #25
0
def check_data_integrity(layer_objects):
    """Check list of layer objects

    Input
        layer_objects: List of InaSAFE layer instances

    Output
        Nothing

    Raises
        Exceptions for a range of errors

    This function checks that
    * Layers have correct keywords
    * That they have the same georeferences
    """

    # Link to documentation
    manpage = ('http://risiko_dev.readthedocs.org/en/latest/usage/'
               'plugins/development.html')
    instructions = ('Please add keywords as <keyword>:<value> pairs '
                    'in the .keywords file. For more information '
                    'please read the sections on impact functions '
                    'and keywords in the manual: %s' % manpage)

    # Set default values for projection and geotransform.
    # Enforce DEFAULT (WGS84).
    # Choosing 'None' will use value of first layer.
    reference_projection = Projection(DEFAULT_PROJECTION)
    geotransform = None
    coordinates = None

    for layer in layer_objects:

        # Check that critical keywords exist and are non empty
        keywords = layer.get_keywords()
        for kw in REQUIRED_KEYWORDS:
            msg = ('Layer %s did not have required keyword "%s". '
                   '%s' % (layer.name, kw, instructions))
            verify(kw in keywords, msg)

            val = keywords[kw]
            msg = ('No value found for keyword "%s" in layer %s. '
                   '%s' % (kw, layer.name, instructions))
            verify(val, msg)

        # Ensure that projection is consistent across all layers
        if reference_projection is None:
            reference_projection = layer.projection
        else:
            msg = ('Projection in input layer %s is not as expected:\n'
                   'projection: %s\n'
                   'default:    %s'
                   '' % (layer, layer.projection, reference_projection))
            verify(reference_projection == layer.projection, msg)

        # Ensure that geotransform and dimensions are consistent across
        # all *raster* layers
        if layer.is_raster:
            if geotransform is None:
                geotransform = layer.get_geotransform()
            else:
                msg = ('Geotransforms in input raster layers are different: '
                       '%s %s' % (geotransform, layer.get_geotransform()))
                verify(numpy.allclose(geotransform,
                                      layer.get_geotransform(),
                                      rtol=1.0e-12), msg)

        # In case of vector layers, we just check that they are non-empty
        # FIXME (Ole): Not good as nasty error is raised in cases where
        # there are no buildings in the hazard area. Need to be more graceful
        # See e.g. shakemap dated 20120227190230
        if layer.is_vector:
            msg = ('There are no vector data features. '
                   'Perhaps zoom out or pan to the study area '
                   'and try again')
            verify(len(layer) > 0, msg)

    # Check that arrays are aligned.

    refname = None
    for layer in layer_objects:
        if layer.is_raster:

            if refname is None:
                refname = layer.get_name()
                M = layer.rows
                N = layer.columns

            msg = ('Rasters are not aligned!\n'
                   'Raster %s has %i rows but raster %s has %i rows\n'
                   'Refer to issue #102' % (layer.get_name(),
                                            layer.rows,
                                            refname, M))
            verify(layer.rows == M, msg)

            msg = ('Rasters are not aligned!\n'
                   'Raster %s has %i columns but raster %s has %i columns\n'
                   'Refer to issue #102' % (layer.get_name(),
                                            layer.columns,
                                            refname, N))
            verify(layer.columns == N, msg)
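A minimal usage sketch with hypothetical input files; the function raises an exception if keywords are missing or georeferences disagree:

    layers = [Raster(data='hazard.tif'), Vector(data='exposure.shp')]
    check_data_integrity(layers)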
Example #26
0
def interpolate_raster_vector_points(R, V, name=None):
    """Interpolate from raster layer to point data

    Input
        R: Raster data set (grid)
        V: Vector data set (points)
        name: Name for new attribute.
              If None (default) the name of R is used

    Output
        I: Vector data set; points located as V with values interpolated from R

    """

    msg = ('There are no data points to interpolate to. Perhaps zoom out '
           'and try again')
    verify(len(V) > 0, msg)

    # Input checks
    verify(R.is_raster)
    verify(V.is_vector)
    verify(V.is_point_data)

    # Get raster data and corresponding x and y axes
    A = R.get_data(nan=True)
    longitudes, latitudes = R.get_geometry()
    verify(len(longitudes) == A.shape[1])
    verify(len(latitudes) == A.shape[0])

    # Get vector point geometry as Nx2 array
    coordinates = numpy.array(V.get_geometry(),
                              dtype='d',
                              copy=False)

    # Interpolate and create new attribute
    N = len(V)
    attributes = []
    if name is None:
        name = R.get_name()

    values = interpolate_raster(longitudes, latitudes, A,
                                coordinates, mode='linear')

    # Create list of dictionaries for this attribute and return
    for i in range(N):
        attributes.append({name: values[i]})

    return Vector(data=attributes, projection=V.get_projection(),
                  geometry=coordinates)
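A minimal usage sketch with hypothetical file names; the returned point layer carries one new attribute named after the raster unless a name is given:

    R = Raster(data='shaking.tif')               # hypothetical raster grid
    P = Vector(data='sites.shp')                 # hypothetical point layer
    I = interpolate_raster_vector_points(R, P, name='mmi')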
Example #27
0
    def interpolate(self, X, name=None, attribute=None):
        """Interpolate values of this vector layer to other layer

        Input
            X: Layer object defining target
            name: Optional name of interpolated layer
            attribute: Optional attribute name to use.
                       If None, all attributes are used.
                       FIXME (Ole): Single attribute not tested well yet and
                                    not implemented for lines

        Output
            Y: Layer object with values of this vector layer interpolated to
               geometry of input layer X
        """

        msg = 'Input to Vector.interpolate must be a vector layer instance'
        verify(X.is_vector, msg)

        X_projection = X.get_projection()
        S_projection = self.get_projection()

        msg = ('Projections must be the same: I got %s and %s'
               % (S_projection, X_projection))
        verify(S_projection == X_projection, msg)

        msg = ('Vector layer to interpolate from must be polygon geometry. '
               'I got OGR geometry type %s'
               % geometrytype2string(self.geometry_type))
        verify(self.is_polygon_data, msg)

        # FIXME (Ole): Maybe organise this the same way it is done with rasters
        # FIXME (Ole): Retain original geometry to use with returned data here
        if X.is_polygon_data:
            # Use centroids, in case of polygons
            X = convert_polygons_to_centroids(X)
        elif X.is_line_data:

            # Clip lines to polygon and return centroids

            # FIXME (Ole): Need to separate this out, but identify what is
            #              common with points and lines
            #

            #X.write_to_file('line_data.shp')
            #self.write_to_file('poly_data.shp')

            # Extract line features
            lines = X.get_geometry()
            line_attributes = X.get_data()
            N = len(X)
            verify(len(lines) == N)
            verify(len(line_attributes) == N)

            # Extract polygon features
            polygons = self.get_geometry()
            poly_attributes = self.get_data()
            verify(len(polygons) == len(poly_attributes))

            # Data structure for resulting line segments
            clipped_geometry = []
            clipped_attributes = []

            # Clip lines to polygons
            for i, polygon in enumerate(polygons):
                for j, line in enumerate(lines):
                    inside, outside = clip_line_by_polygon(line, polygon)

                    # Create new attributes
                    # FIXME (Ole): Not done single specified polygon
                    #              attribute
                    inside_attributes = {}
                    outside_attributes = {}
                    for key in line_attributes[j]:
                        inside_attributes[key] = line_attributes[j][key]
                        outside_attributes[key] = line_attributes[j][key]

                    for key in poly_attributes[i]:
                        inside_attributes[key] = poly_attributes[i][key]
                        outside_attributes[key] = None

                    # Always create default attribute flagging if segment was
                    # inside any of the polygons
                    inside_attributes[DEFAULT_ATTRIBUTE] = True
                    outside_attributes[DEFAULT_ATTRIBUTE] = False

                    # Assign new attribute set to clipped lines
                    for segment in inside:
                        clipped_geometry.append(segment)
                        clipped_attributes.append(inside_attributes)

                    for segment in outside:
                        clipped_geometry.append(segment)
                        clipped_attributes.append(outside_attributes)

            # Create new Vector instance and return
            V = Vector(data=clipped_attributes,
                       projection=X.get_projection(),
                       geometry=clipped_geometry,
                       geometry_type='line')
            #V.write_to_file('clipped_and_tagged.shp')
            return V

        # The following applies only to Polygon-Point interpolation
        msg = ('Vector layer to interpolate to must be point geometry. '
               'I got OGR geometry type %s'
               % geometrytype2string(X.geometry_type))
        verify(X.is_point_data, msg)

        msg = ('Name must be either a string or None. I got %s'
               % (str(type(name)))[1:-1])
        verify(name is None or isinstance(name, basestring), msg)

        msg = ('Attribute must be either a string or None. I got %s'
               % (str(type(attribute)))[1:-1])
        verify(attribute is None or isinstance(attribute, basestring), msg)

        attribute_names = self.get_attribute_names()
        if attribute is not None:
            msg = ('Requested attribute "%s" did not exist in %s'
                   % (attribute, attribute_names))
            verify(attribute in attribute_names, msg)

        #----------------
        # Start algorithm
        #----------------

        # Extract point features
        points = ensure_numeric(X.get_geometry())
        attributes = X.get_data()
        N = len(X)

        # Extract polygon features
        geom = self.get_geometry()
        data = self.get_data()
        verify(len(geom) == len(data))

        # Augment point features with empty attributes from polygon
        for a in attributes:
            if attribute is None:
                # Use all attributes
                for key in attribute_names:
                    a[key] = None
            else:
                # Use only requested attribute
                # FIXME (Ole): Test for this is not finished
                a[attribute] = None

            # Always create default attribute flagging if point was
            # inside any of the polygons
            a[DEFAULT_ATTRIBUTE] = None

        # Traverse polygons and assign attributes to points that fall inside
        for i, polygon in enumerate(geom):
            if attribute is None:
                # Use all attributes
                poly_attr = data[i]
            else:
                # Use only requested attribute
                poly_attr = {attribute: data[i][attribute]}

            # Assign default attribute to indicate points inside
            poly_attr[DEFAULT_ATTRIBUTE] = True

            # Clip data points by polygons and add polygon attributes
            indices = inside_polygon(points, polygon)
            for k in indices:
                for key in poly_attr:
                    # Assign attributes from polygon to points
                    attributes[k][key] = poly_attr[key]

        # Create new Vector instance and return
        V = Vector(data=attributes,
                   projection=X.get_projection(),
                   geometry=X.get_geometry())
        return V
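# Usage sketch for the polygon-to-point case above. Illustrative only:
# the import path and the shapefile names are assumptions, not from this
# source.
from safe.storage.vector import Vector  # assumed module path

flood_zones = Vector(data='flood_polygons.shp')   # polygon hazard layer
buildings = Vector(data='building_points.shp')    # point exposure layer

# Each point inside a flood polygon receives that polygon's attributes and
# DEFAULT_ATTRIBUTE set to True; points outside keep None for those fields.
tagged = flood_zones.interpolate(buildings, name='tagged_buildings')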
Example #28
0
    def get_data(self, nan=True, scaling=None):
        """Get raster data as numeric array

        Input
            nan: Optional flag controlling handling of missing values.
                 If nan is True (default), nodata values will be replaced
                 with numpy.nan
                 If keyword nan has a numeric value, nodata values will
                 be replaced by that value. E.g. to set missing values to 0,
                 do get_data(nan=0.0)
            scaling: Optional flag controlling if data is to be scaled
                     if it has been resampled. Admissible values are
                     False: data is retrieved without modification.
                     True: Data is rescaled based on the squared ratio between
                           its current and native resolution. This is typically
                           required if raster data represents a density
                           such as population per km^2
                     None: The behaviour will depend on the keyword
                           "datatype" associated with the layer. If
                           it is "density", scaling will be applied,
                           otherwise not. This is the default.
                     scalar value: If scaling is a numerical scalar value,
                                   that value will be used to scale the data

        """

        if hasattr(self, 'data'):
            A = self.data
            verify(A.shape[0] == self.rows and A.shape[1] == self.columns)
        else:
            # Read from raster file
            A = self.band.ReadAsArray()

            # Convert to double precision (issue #75)
            A = numpy.array(A, dtype=numpy.float64)

            # Self check
            M, N = A.shape
            msg = ('Dimensions of raster array do not match those of '
                   'raster file %s' % self.filename)
            verify(M == self.rows, msg)
            verify(N == self.columns, msg)

        # Handle no data value
        if nan is False:
            pass
        else:
            if nan is True:
                NAN = numpy.nan
            else:
                NAN = nan

            # Replace NODATA_VALUE with NaN
            nodata = self.get_nodata_value()

            NaN = numpy.ones(A.shape, A.dtype) * NAN
            A = numpy.where(A == nodata, NaN, A)

        # Take care of possible scaling
        if scaling is None:
            # Redefine scaling from density keyword if possible
            kw = self.get_keywords()
            if 'datatype' in kw and kw['datatype'].lower() == 'density':
                scaling = True
            else:
                scaling = False

        if scaling is False:
            # No change
            sigma = 1
        elif scaling is True:
            # Calculate scaling based on resolution change

            actual_res = self.get_resolution(isotropic=True)
            native_res = self.get_resolution(isotropic=True, native=True)
            #print
            #print 'Actual res', actual_res
            #print 'Native res', native_res
            sigma = (actual_res / native_res)**2
            #print 'Scaling', sigma
        else:
            # See if scaling can work as a scalar value
            try:
                sigma = float(scaling)
            except Exception, e:
                msg = ('Keyword scaling "%s" could not be converted to a '
                       'number. It must be either True, False, None or a '
                       'number: %s' % (scaling, str(e)))
                raise Exception(msg)
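# Standalone sketch of the density scaling rule above; the resolutions and
# counts below are made-up values, not taken from any real layer.
import numpy

native_res = 0.000833   # native cell size in degrees (assumed value)
actual_res = 0.002499   # cell size after resampling (assumed, 3x coarser)
sigma = (actual_res / native_res) ** 2   # ~9: each cell covers 9x the area

density = numpy.array([[100.0, 200.0], [300.0, 400.0]])  # people per cell
scaled = density * sigma   # what get_data(scaling=True) would effectively do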
Example #29
0
    def __init__(self,
                 data=None,
                 projection=None,
                 geometry=None,
                 geometry_type=None,
                 name='',
                 keywords=None,
                 style_info=None):
        """Initialise object with either geometry or filename

        Input
            data: Can be either
                * a filename of a vector file format known to GDAL
                * List of dictionaries of fields associated with
                  point coordinates
                * None
            projection: Geospatial reference in WKT format.
                        Only used if geometry is provided as a numeric array.
            geometry: A list of either point coordinates or polygons/lines
                      (see note below)
            geometry_type: Desired interpretation of geometry.
                           Valid options are 'point', 'line', 'polygon' or
                           the ogr types: 1, 2, 3
                           If None, a geometry_type will be inferred
            name: Optional name for layer.
                  Only used if geometry is provided as a numeric array.
            keywords: Optional dictionary with keywords that describe the
                      layer. When the layer is stored, these keywords will
                      be written into an associated file with extension
                      .keywords.

                      Keywords can for example be used to display text
                      about the layer in a web application.

        Notes

        If data is a filename, all other arguments are ignored
        as they will be inferred from the file.

        The geometry type will be inferred from the dimensions of geometry.
        If each entry is one set of coordinates, the type will be
        ogr.wkbPoint; if it is an array of coordinates, the type will be
        ogr.wkbPolygon.

        Each polygon or line feature takes the form of an Nx2 array of
        vertices, where consecutive vertices are joined by line segments.
        """

        if data is None and projection is None and geometry is None:
            # Instantiate empty object
            self.name = name
            self.projection = None
            self.geometry = None
            self.geometry_type = None
            self.filename = None
            self.data = None
            self.extent = None
            self.keywords = {}
            self.style_info = {}
            return

        if isinstance(data, basestring):
            self.read_from_file(data)
        else:
            # Assume that data is a sequence of attribute dictionaries
            # passed to the Vector constructor, with extra keyword
            # arguments supplying metadata

            self.name = name
            self.filename = None

            if keywords is None:
                self.keywords = {}
            else:
                msg = ('Specified keywords must be either None or a '
                       'dictionary. I got %s' % keywords)
                verify(isinstance(keywords, dict), msg)
                self.keywords = keywords

            if style_info is None:
                self.style_info = {}
            else:
                msg = ('Specified style_info must be either None or a '
                       'dictionary. I got %s' % style_info)
                verify(isinstance(style_info, dict), msg)
                self.style_info = style_info

            msg = 'Geometry must be specified'
            verify(geometry is not None, msg)

            msg = 'Geometry must be a sequence'
            verify(is_sequence(geometry), msg)
            self.geometry = geometry

            self.geometry_type = get_geometry_type(geometry, geometry_type)

            #msg = 'Projection must be specified'
            #verify(projection is not None, msg)
            self.projection = Projection(projection)

            if data is None:
                # Generate default attribute as OGR will do that anyway
                # when writing
                data = []
                for i in range(len(geometry)):
                    data.append({'ID': i})

            # Check data
            self.data = data
            if data is not None:
                msg = 'Data must be a sequence'
                verify(is_sequence(data), msg)

                msg = ('The number of entries in geometry and data '
                       'must be the same')
                verify(len(geometry) == len(data), msg)
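# Construction sketch for the in-memory branch above. The import path is
# an assumption and the WKT string is abbreviated for illustration.
from safe.storage.vector import Vector  # assumed module path

wgs84 = ('GEOGCS["WGS 84",DATUM["WGS_1984",'
         'SPHEROID["WGS 84",6378137,298.257223563]],'
         'PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]')

points = [[106.80, -6.17], [106.82, -6.19]]   # lon/lat coordinate pairs
attributes = [{'name': 'A'}, {'name': 'B'}]   # one attribute dict per point

# geometry_type is inferred as 'point' since each entry is a single pair
layer = Vector(data=attributes, projection=wgs84,
               geometry=points, name='example_points')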
Example #30
0
def check_data_integrity(layer_objects):
    """Check list of layer objects

    Input
        layer_objects: List of InaSAFE layer instances

    Output
        Nothing

    Raises
        Exceptions for a range of errors

    This function checks that
    * Layers have the required keywords
    * Layers have the same georeference
    """

    # Link to documentation
    manpage = ('http://risiko_dev.readthedocs.org/en/latest/usage/'
               'plugins/development.html')
    instructions = ('Please add keywords as <keyword>:<value> pairs '
                    'in the .keywords file. For more information '
                    'please read the sections on impact functions '
                    'and keywords in the manual: %s' % manpage)

    # Set default values for projection and geotransform.
    # Enforce DEFAULT (WGS84).
    # Choosing 'None' will use value of first layer.
    reference_projection = Projection(DEFAULT_PROJECTION)
    geotransform = None
    coordinates = None

    for layer in layer_objects:

        # Check that critical keywords exist and are non empty
        keywords = layer.get_keywords()
        for kw in REQUIRED_KEYWORDS:
            msg = ('Layer %s did not have required keyword "%s". '
                   '%s' % (layer.name, kw, instructions))
            verify(kw in keywords, msg)

            val = keywords[kw]
            msg = ('No value found for keyword "%s" in layer %s. '
                   '%s' % (kw, layer.name, instructions))
            verify(val, msg)

        # Ensure that projection is consistent across all layers
        if reference_projection is None:
            reference_projection = layer.projection
        else:
            msg = ('Projection of input layer %s is not as expected:\n'
                   'projection: %s\n'
                   'default:    %s'
                   '' % (layer, layer.projection, reference_projection))
            verify(reference_projection == layer.projection, msg)

        # Ensure that geotransform and dimensions are consistent across
        # all *raster* layers
        if layer.is_raster:
            if geotransform is None:
                geotransform = layer.get_geotransform()
            else:
                msg = ('Geotransforms in input raster layers are different: '
                       '%s %s' % (geotransform, layer.get_geotransform()))
                verify(
                    numpy.allclose(geotransform,
                                   layer.get_geotransform(),
                                   rtol=1.0e-12), msg)

        # In case of vector layers, we just check that they are non-empty
        # FIXME (Ole): Not good as nasty error is raised in cases where
        # there are no buildings in the hazard area. Need to be more graceful
        # See e.g. shakemap dated 20120227190230
        if layer.is_vector:
            msg = ('There are no vector data features. '
                   'Perhaps zoom out or pan to the study area '
                   'and try again')
            verify(len(layer) > 0, msg)

    # Check that arrays are aligned.

    refname = None
    for layer in layer_objects:
        if layer.is_raster:

            if refname is None:
                refname = layer.get_name()
                M = layer.rows
                N = layer.columns

            msg = ('Rasters are not aligned!\n'
                   'Raster %s has %i rows but raster %s has %i rows\n'
                   'Refer to issue #102' %
                   (layer.get_name(), layer.rows, refname, M))
            verify(layer.rows == M, msg)

            msg = ('Rasters are not aligned!\n'
                   'Raster %s has %i columns but raster %s has %i columns\n'
                   'Refer to issue #102' %
                   (layer.get_name(), layer.columns, refname, N))
            verify(layer.columns == N, msg)
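# Standalone sketch of the geotransform comparison used above; the two
# 6-tuples are made-up values in GDAL order (top-left x, pixel width,
# rotation, top-left y, rotation, pixel height).
import numpy

gt_reference = (105.0, 0.000833, 0.0, -5.0, 0.0, -0.000833)
gt_candidate = (105.0, 0.000833, 0.0, -5.0, 0.0, -0.000833)

# Layers count as aligned when geotransforms agree to rtol=1.0e-12,
# matching the verify() call in check_data_integrity.
aligned = numpy.allclose(gt_reference, gt_candidate, rtol=1.0e-12)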
Example #31
0
            raise Exception(msg)

        if native:
            keywords = self.get_keywords()
            if 'resolution' in keywords:

                resolution = keywords['resolution']
                try:
                    res = float(resolution)
                except (ValueError, TypeError):
                    # Assume resolution is a string of the form:
                    # (0.00045228819716044, 0.00045228819716044)

                    msg = ('Unknown format for resolution keyword: %s'
                           % resolution)
                    verify((resolution.startswith('(') and
                            resolution.endswith(')')), msg)

                    dx, dy = [float(s) for s in resolution[1:-1].split(',')]
                    if not isotropic:
                        res = (dx, dy)
                    else:
                        msg = ('Resolution of layer "%s" was not isotropic: '
                               '[dx, dy] == %s' % (self.get_name(), (dx, dy)))
                        verify(numpy.allclose(dx, dy,
                                              rtol=1.0e-12, atol=1.0e-12), msg)
                        res = dx
                else:
                    if not isotropic:
                        res = (res, res)

        # Return either 2-tuple or scalar depending on isotropic
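# Standalone sketch of parsing the 'resolution' keyword handled above:
# the keyword may be a plain number or a string of the form '(dx, dy)'.
resolution = '(0.00045228819716044, 0.00045228819716044)'

try:
    res = float(resolution)            # plain scalar form, e.g. '0.00045'
except ValueError:
    # Tuple form: strip the parentheses and split on the comma
    dx, dy = [float(s) for s in resolution[1:-1].split(',')]
    res = dx                           # isotropic case, since dx == dy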
Example #32
0
    def __init__(self,
                 data=None,
                 projection=None,
                 geotransform=None,
                 name='',
                 keywords=None,
                 style_info=None):
        """Initialise object with either data or filename

        Input
            data: Can be either
                * a filename of a raster file format known to GDAL
                * an MxN array of raster data
                * None (FIXME (Ole): Remove this option)
            projection: Geospatial reference in WKT format.
                        Only used if data is provided as a numeric array.
            geotransform: GDAL geotransform (6-tuple).
                          (top left x, w-e pixel resolution, rotation,
                           top left y, rotation, n-s pixel resolution).
                          See e.g. http://www.gdal.org/gdal_tutorial.html
                          Only used if data is provided as a numeric array.
            name: Optional name for layer.
                  Only used if data is provided as a numeric array.
            keywords: Optional dictionary with keywords that describe the
                      layer. When the layer is stored, these keywords will
                      be written into an associated file with extension
                      .keywords.

                      Keywords can for example be used to display text
                      about the layer in a web application.

        Note that if data is a filename, all other arguments are ignored
        as they will be inferred from the file.
        """

        # Input checks
        if data is None:
            # Instantiate empty object
            self.name = name
            self.data = None
            self.projection = None
            self.coordinates = None
            self.filename = None
            self.keywords = {}
            return

        # Initialisation
        if isinstance(data, basestring):
            self.read_from_file(data)
        else:
            # Assume that data is provided as an array
            # with extra keyword arguments supplying metadata
            if keywords is None:
                self.keywords = {}
            else:
                msg = ('Specified keywords must be either None or a '
                       'dictionary. I got %s' % keywords)
                verify(isinstance(keywords, dict), msg)
                self.keywords = keywords

            if style_info is None:
                self.style_info = {}
            else:
                msg = ('Specified style_info must be either None or a '
                       'dictionary. I got %s' % style_info)
                verify(isinstance(style_info, dict), msg)
                self.style_info = style_info

            self.data = numpy.array(data, dtype='d', copy=False)

            self.filename = None
            self.name = name

            self.projection = Projection(projection)
            self.geotransform = geotransform

            self.rows = self.data.shape[0]
            self.columns = self.data.shape[1]

            self.number_of_bands = 1
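# Construction sketch for the in-memory branch above. The import path is
# an assumption and the WKT string is abbreviated for illustration.
from safe.storage.raster import Raster  # assumed module path
import numpy

wgs84 = ('GEOGCS["WGS 84",DATUM["WGS_1984",'
         'SPHEROID["WGS 84",6378137,298.257223563]],'
         'PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]')

grid = numpy.zeros((3, 4))   # 3 rows x 4 columns of raster values
# GDAL geotransform: top-left x, pixel width, rotation,
#                    top-left y, rotation, pixel height (negative = north up)
geotransform = (105.0, 0.01, 0.0, -5.0, 0.0, -0.01)

layer = Raster(data=grid, projection=wgs84,
               geotransform=geotransform, name='example_grid')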
Example #33
0
    def interpolate(self, X, name=None, attribute=None):
        """Interpolate values of this vector layer to other layer

        Input
            X: Layer object defining target
            name: Optional name of interpolated layer
            attribute: Optional attribute name to use.
                       If None, all attributes are used.
                       FIXME (Ole): Single attribute not tested well yet and
                                    not implemented for lines

        Output
            Y: Layer object with values of this vector layer interpolated to
               geometry of input layer X
        """

        msg = 'Input to Vector.interpolate must be a vector layer instance'
        verify(X.is_vector, msg)

        X_projection = X.get_projection()
        S_projection = self.get_projection()

        msg = ('Projections must be the same: I got %s and %s' %
               (S_projection, X_projection))
        verify(S_projection == X_projection, msg)

        msg = ('Vector layer to interpolate from must be polygon geometry. '
               'I got OGR geometry type %s' %
               geometrytype2string(self.geometry_type))
        verify(self.is_polygon_data, msg)

        # FIXME (Ole): Maybe organise this the same way it is done with rasters
        # FIXME (Ole): Retain original geometry to use with returned data here
        if X.is_polygon_data:
            # Use centroids, in case of polygons
            X = convert_polygons_to_centroids(X)
        elif X.is_line_data:

            # Clip lines to polygon and return centroids

            # FIXME (Ole): Need to separate this out, but identify what is
            #              common with points and lines
            #

            #X.write_to_file('line_data.shp')
            #self.write_to_file('poly_data.shp')

            # Extract line features
            lines = X.get_geometry()
            line_attributes = X.get_data()
            N = len(X)
            verify(len(lines) == N)
            verify(len(line_attributes) == N)

            # Extract polygon features
            polygons = self.get_geometry()
            poly_attributes = self.get_data()
            verify(len(polygons) == len(poly_attributes))

            # Data structure for resulting line segments
            clipped_geometry = []
            clipped_attributes = []

            # Clip lines to polygons
            for i, polygon in enumerate(polygons):
                for j, line in enumerate(lines):
                    inside, outside = clip_line_by_polygon(line, polygon)

                    # Create new attributes
                    # FIXME (Ole): Single specified polygon attribute is
                    #              not handled here
                    inside_attributes = {}
                    outside_attributes = {}
                    for key in line_attributes[j]:
                        inside_attributes[key] = line_attributes[j][key]
                        outside_attributes[key] = line_attributes[j][key]

                    for key in poly_attributes[i]:
                        inside_attributes[key] = poly_attributes[i][key]
                        outside_attributes[key] = None

                    # Always create default attribute flagging if segment was
                    # inside any of the polygons
                    inside_attributes[DEFAULT_ATTRIBUTE] = True
                    outside_attributes[DEFAULT_ATTRIBUTE] = False

                    # Assign new attribute set to clipped lines
                    for segment in inside:
                        clipped_geometry.append(segment)
                        clipped_attributes.append(inside_attributes)

                    for segment in outside:
                        clipped_geometry.append(segment)
                        clipped_attributes.append(outside_attributes)

            # Create new Vector instance and return
            V = Vector(data=clipped_attributes,
                       projection=X.get_projection(),
                       geometry=clipped_geometry,
                       geometry_type='line')
            #V.write_to_file('clipped_and_tagged.shp')
            return V

        # The following applies only to Polygon-Point interpolation
        msg = ('Vector layer to interpolate to must be point geometry. '
               'I got OGR geometry type %s' %
               geometrytype2string(X.geometry_type))
        verify(X.is_point_data, msg)

        msg = ('Name must be either a string or None. I got %s' %
               (str(type(name)))[1:-1])
        verify(name is None or isinstance(name, basestring), msg)

        msg = ('Attribute must be either a string or None. I got %s' %
               (str(type(attribute)))[1:-1])
        verify(attribute is None or isinstance(attribute, basestring), msg)

        attribute_names = self.get_attribute_names()
        if attribute is not None:
            msg = ('Requested attribute "%s" did not exist in %s' %
                   (attribute, attribute_names))
            verify(attribute in attribute_names, msg)

        #----------------
        # Start algorithm
        #----------------

        # Extract point features
        points = ensure_numeric(X.get_geometry())
        attributes = X.get_data()
        N = len(X)

        # Extract polygon features
        geom = self.get_geometry()
        data = self.get_data()
        verify(len(geom) == len(data))

        # Augment point features with empty attributes from polygon
        for a in attributes:
            if attribute is None:
                # Use all attributes
                for key in attribute_names:
                    a[key] = None
            else:
                # Use only requested attribute
                # FIXME (Ole): Test for this is not finished
                a[attribute] = None

            # Always create default attribute flagging if point was
            # inside any of the polygons
            a[DEFAULT_ATTRIBUTE] = None

        # Traverse polygons and assign attributes to points that fall inside
        for i, polygon in enumerate(geom):
            if attribute is None:
                # Use all attributes
                poly_attr = data[i]
            else:
                # Use only requested attribute
                poly_attr = {attribute: data[i][attribute]}

            # Assign default attribute to indicate points inside
            poly_attr[DEFAULT_ATTRIBUTE] = True

            # Clip data points by polygons and add polygon attributes
            indices = inside_polygon(points, polygon)
            for k in indices:
                for key in poly_attr:
                    # Assign attributes from polygon to points
                    attributes[k][key] = poly_attr[key]

        # Create new Vector instance and return
        V = Vector(data=attributes,
                   projection=X.get_projection(),
                   geometry=X.get_geometry())
        return V
Example #34
0
    def run(layers):
        """Risk plugin for earthquake fatalities

        Input
          layers: List of layers expected to contain
              H: Raster layer of flood depth
              P: Raster layer of population data on the same grid as H
        """

        # Depth above which people are regarded as affected [m]
        threshold = 0.1
        thresholds = [0.1, 0.2, 0.3, 0.5, 0.8, 1.0]

        # Identify hazard and exposure layers
        inundation = get_hazard_layer(layers)  # Flood inundation [m]

        # Get population and gender ratio
        population = gender_ratio = None
        for layer in get_exposure_layers(layers):
            keywords = layer.get_keywords()

            if 'datatype' not in keywords:
                population = layer
            else:
                datatype = keywords['datatype']

                if 'ratio' not in datatype:
                    population = layer
                else:
                    # if 'female' in datatype and 'ratio' in datatype:
                    gender_ratio_unit = keywords['unit']

                    msg = ('Unit for gender ratio must be either '
                           '"percent" or "ratio"')
                    if gender_ratio_unit not in ['percent', 'ratio']:
                        raise Exception(msg)

                    gender_ratio = layer

        msg = 'No population layer was found in: %s' % str(layers)
        verify(population is not None, msg)

        # Extract data as numeric arrays
        D = inundation.get_data(nan=0.0)  # Depth

        # Calculate impact as population exposed to depths > threshold
        if population.get_resolution(native=True, isotropic=True) < 0.0005:
            # Keep this for backwards compatibility just a little while
            # This uses the original custom population set and
            # serves as a reference

            P = population.get_data(nan=0.0)  # Population density
            pixel_area = 2500
            I = numpy.where(D > threshold, P, 0) / 100000.0 * pixel_area
        else:
            # This is the new generic way of scaling (issue #168 and #172)
            P = population.get_data(nan=0.0, scaling=True)
            I = numpy.where(D > threshold, P, 0)

        if gender_ratio is not None:
            # Extract gender ratio at each pixel (as ratio)
            G = gender_ratio.get_data(nan=0.0)
            if gender_ratio_unit == 'percent':
                G /= 100

            # Calculate breakdown
            P_female = P * G
            P_male = P - P_female

            I_female = I * G
            I_male = I - I_female

        # Generate text with result for this study
        total = str(int(sum(P.flat) / 1000))
        count = str(int(sum(I.flat) / 1000))

        # Create report
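        # Note: the report labels below are Indonesian -- 'Jumlah Penduduk'
        # = total population, 'Perkiraan Jumlah Terdampak' = estimated
        # number affected, 'Wanita'/'Pria' = female/male, and the closing
        # 'Catatan: Semua nomor x 1000' = 'Note: all figures are x 1000'.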
        impact_summary = ('<table border="0" width="320px">'
                          '   <tr><td><b>%s&#58;</b></td>'
                          '<td align="right"><b>%s</b></td></tr>' %
                          ('Jumlah Penduduk', total))
        if gender_ratio is not None:
            total_female = str(int(sum(P_female.flat) / 1000))
            total_male = str(int(sum(P_male.flat) / 1000))

            impact_summary += ('        <tr><td>%s&#58;</td>'
                               '<td align="right">%s</td></tr>' %
                               (' - Wanita', total_female))
            impact_summary += ('        <tr><td>%s&#58;</td>'
                               '<td align="right">%s</td></tr>' %
                               (' - Pria', total_male))
            impact_summary += '<tr><td>&nbsp;</td></tr>'  # Blank row

        impact_summary += (
            '   <tr><td><b>%s&#58;</b></td>'
            '<td align="right"><b>%s</b></td></tr>' %
            ('Perkiraan Jumlah Terdampak (> %.1fm)' % threshold, count))

        if gender_ratio is not None:
            affected_female = str(int(sum(I_female.flat) / 1000))
            affected_male = str(int(sum(I_male.flat) / 1000))

            impact_summary += ('        <tr><td>%s&#58;</td>'
                               '<td align="right">%s</td></tr>' %
                               (' - Wanita', affected_female))
            impact_summary += ('        <tr><td>%s&#58;</td>'
                               '<td align="right">%s</td></tr>' %
                               (' - Pria', affected_male))

        impact_summary += '</table>'

        impact_summary += '<br>'  # Blank separation row
        impact_summary += 'Catatan&#58; Semua nomor x 1000'

        # Create raster object and return
        R = Raster(I,
                   projection=inundation.get_projection(),
                   geotransform=inundation.get_geotransform(),
                   name='People affected',
                   keywords={'impact_summary': impact_summary})
        return R
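# Standalone sketch of the exposure rule used above, with toy numbers:
# people are counted only in cells where depth exceeds the threshold.
import numpy

threshold = 0.1                                    # depth in metres
D = numpy.array([[0.00, 0.05], [0.40, 1.20]])      # flood depth grid
P = numpy.array([[50.0, 80.0], [120.0, 200.0]])    # population per cell

I = numpy.where(D > threshold, P, 0)               # [[0, 0], [120, 200]]
affected = I.sum()                                 # 320 people in total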
Example #35
0
def calculate_impact(layers, impact_fcn,
                     comment=''):
    """Calculate impact levels as a function of list of input layers

    Input
        layers: List of Raster and Vector layer objects to be used for analysis

        impact_fcn: Impact function (plugin) class; an instance will be
                    created and its run(layers) method called
        comment: Optional string embedded as metadata in the output file

    Output
        Impact layer object whose filename attribute points to the newly
        written file (tif for raster, shp for vector). Comment is embedded
        as metadata and the filename is generated from input data and date.

    Note
        The admissible file types are tif and asc/prj for raster and
        gml or shp for vector data

    Assumptions
        1. All layers are in WGS84 geographic coordinates
        2. Layers are equipped with metadata such as names and categories
    """

    # Input checks
    check_data_integrity(layers)

    # Get an instance of the passed impact_fcn
    impact_function = impact_fcn()

    # Pass input layers to plugin
    F = impact_function.run(layers)

    msg = 'Impact function %s returned None' % str(impact_function)
    verify(F is not None, msg)

    # Write result and return filename
    if F.is_raster:
        extension = '.tif'
        # use default style for raster
    else:
        extension = '.shp'
        # use default style for vector

    output_filename = unique_filename(suffix=extension)
    F.filename = output_filename
    F.write_to_file(output_filename)

    # Establish default name (layer1 X layer2 X impact_function)
    if not F.get_name():
        default_name = ''
        for layer in layers:
            default_name += layer.name + ' X '

        if hasattr(impact_function, 'plugin_name'):
            default_name += impact_function.plugin_name
        else:
            # Strip trailing ' X '
            default_name = default_name[:-3]

        F.set_name(default_name)

    # FIXME (Ole): If we need to save style as defined by the impact_function
    #              this is the place

    # Return layer object
    return F
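# Usage sketch for calculate_impact. Illustrative only: the import paths,
# file names and the plugin class name are assumptions, not from this source.
# from safe.engine.core import calculate_impact
# from safe.storage.raster import Raster
#
# hazard = Raster('flood_depth.tif')      # depth in metres, WGS84
# exposure = Raster('population.tif')     # population density, WGS84
# impact = calculate_impact([hazard, exposure], FloodImpactFunction)
# print impact.get_name(), impact.filename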
Example #36
0
    def run(layers):
        """Risk plugin for earthquake fatalities

        Input
          layers: List of layers expected to contain
              H: Raster layer of flood depth
              P: Raster layer of population data on the same grid as H
        """

        # Depth above which people are regarded as affected [m]
        threshold = 0.1
        thresholds = [0.1, 0.2, 0.3, 0.5, 0.8, 1.0]

        # Identify hazard and exposure layers
        inundation = get_hazard_layer(layers)  # Flood inundation [m]

        # Get population and gender ratio
        population = gender_ratio = None
        for layer in get_exposure_layers(layers):
            keywords = layer.get_keywords()

            if 'datatype' not in keywords:
                population = layer
            else:
                datatype = keywords['datatype']

                if 'ratio' not in datatype:
                    population = layer
                else:
                    # if 'female' in datatype and 'ratio' in datatype:
                    gender_ratio_unit = keywords['unit']

                    msg = ('Unit for gender ratio must be either '
                           '"percent" or "ratio"')
                    if gender_ratio_unit not in ['percent', 'ratio']:
                        raise Exception(msg)

                    gender_ratio = layer

        msg = 'No population layer was found in: %s' % str(layers)
        verify(population is not None, msg)

        # Extract data as numeric arrays
        D = inundation.get_data(nan=0.0)  # Depth

        # Calculate impact as population exposed to depths > threshold
        if population.get_resolution(native=True, isotropic=True) < 0.0005:
            # Keep this for backwards compatibility just a little while
            # This uses the original custom population set and
            # serves as a reference

            P = population.get_data(nan=0.0)  # Population density
            pixel_area = 2500
            I = numpy.where(D > threshold, P, 0) / 100000.0 * pixel_area
        else:
            # This is the new generic way of scaling (issue #168 and #172)
            P = population.get_data(nan=0.0, scaling=True)
            I = numpy.where(D > threshold, P, 0)

        if gender_ratio is not None:
            # Extract gender ratio at each pixel (as ratio)
            G = gender_ratio.get_data(nan=0.0)
            if gender_ratio_unit == 'percent':
                G /= 100

            # Calculate breakdown
            P_female = P * G
            P_male = P - P_female

            I_female = I * G
            I_male = I - I_female

        # Generate text with result for this study
        total = str(int(sum(P.flat) / 1000))
        count = str(int(sum(I.flat) / 1000))

        # Create report
        impact_summary = ('<table border="0" width="320px">'
                   '   <tr><td><b>%s&#58;</b></td>'
                   '<td align="right"><b>%s</b></td></tr>'
                   % ('Jumlah Penduduk', total))
        if gender_ratio is not None:
            total_female = str(int(sum(P_female.flat) / 1000))
            total_male = str(int(sum(P_male.flat) / 1000))

            impact_summary += ('        <tr><td>%s&#58;</td>'
                        '<td align="right">%s</td></tr>'
                        % (' - Wanita', total_female))
            impact_summary += ('        <tr><td>%s&#58;</td>'
                        '<td align="right">%s</td></tr>'
                        % (' - Pria', total_male))
            impact_summary += '<tr><td>&nbsp;</td></tr>'  # Blank row

        impact_summary += ('   <tr><td><b>%s&#58;</b></td>'
                    '<td align="right"><b>%s</b></td></tr>'
                    % ('Perkiraan Jumlah Terdampak (> %.1fm)' % threshold,
                       count))

        if gender_ratio is not None:
            affected_female = str(int(sum(I_female.flat) / 1000))
            affected_male = str(int(sum(I_male.flat) / 1000))

            impact_summary += ('        <tr><td>%s&#58;</td>'
                        '<td align="right">%s</td></tr>'
                        % (' - Wanita', affected_female))
            impact_summary += ('        <tr><td>%s&#58;</td>'
                        '<td align="right">%s</td></tr>'
                        % (' - Pria', affected_male))

        impact_summary += '</table>'

        impact_summary += '<br>'  # Blank separation row
        impact_summary += 'Catatan&#58; Semua nomor x 1000'

        # Create raster object and return
        R = Raster(I,
                   projection=inundation.get_projection(),
                   geotransform=inundation.get_geotransform(),
                   name='People affected',
                   keywords={'impact_summary': impact_summary})
        return R