Example #1
def convert_polygons_to_centroids(V):
    """Convert polygon vector data to point vector data

    Input
        V: Vector layer with polygon data

    Output
        Vector layer with point data and the same attributes as V
    """

    msg = 'Input data %s must be polygon vector data' % V
    verify(V.is_polygon_data, msg)

    geometry = V.get_geometry()
    N = len(V)

    # Calculate points for each polygon
    centroids = []
    for i in range(N):
        c = calculate_polygon_centroid(geometry[i])
        centroids.append(c)

    # Create new point vector layer with same attributes and return
    V = Vector(data=V.get_data(),
               projection=V.get_projection(),
               geometry=centroids,
               name='%s_centroid_data' % V.get_name(),
               keywords=V.get_keywords())
    return V
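calculate_polygon_centroid is provided elsewhere in the library and is not shown in this snippet. As a rough, numpy-only stand-in, the area-weighted centroid of a simple polygon can be computed as below; the function name is hypothetical and only illustrates what the call above presumably returns.

import numpy

def polygon_centroid_sketch(P):
    # Area-weighted centroid of a simple (non self-intersecting) polygon
    # given as an Nx2 array of vertices (shoelace formula)
    P = numpy.asarray(P, dtype='d')
    x, y = P[:, 0], P[:, 1]
    xn, yn = numpy.roll(x, -1), numpy.roll(y, -1)  # Next vertex, wrapping around
    cross = x * yn - xn * y
    A = cross.sum() / 2.0                          # Signed area
    cx = ((x + xn) * cross).sum() / (6.0 * A)
    cy = ((y + yn) * cross).sum() / (6.0 * A)
    return numpy.array([cx, cy])

print(polygon_centroid_sketch([[0, 0], [2, 0], [2, 2], [0, 2]]))  # [ 1.  1.]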
Example #2
def bboxlist2string(bbox, decimals=6):
    """Convert bounding box list to comma separated string

    Input
        bbox: List of coordinates of the form [W, S, E, N]
    Output
        bbox_string: Format 'W,S,E,N' - each will have 6 decimal points
    """

    msg = 'Got string %s, but expected bounding box as a list' % str(bbox)
    verify(not isinstance(bbox, basestring), msg)

    try:
        bbox = list(bbox)
    except:
        msg = 'Could not coerce bbox %s into a list' % str(bbox)
        raise BoundingBoxError(msg)

    msg = ('Bounding box must have 4 coordinates [W, S, E, N]. '
           'I got %s' % str(bbox))
    verify(len(bbox) == 4, msg)

    for x in bbox:
        try:
            float(x)
        except ValueError, e:
            msg = ('Bounding box %s contained non-numeric entry %s, '
                   'original error was "%s".' % (bbox, x, e))
            raise AssertionError(msg)

    # Format as 'W,S,E,N' with the requested number of decimals and return
    # (ending reconstructed from the docstring; not shown in this snippet)
    return ','.join(['%.*f' % (decimals, float(x)) for x in bbox])
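The formatting rule in the docstring comes down to a single expression. A dependency-free sketch (verify and BoundingBoxError are from this codebase and not needed for the illustration):

bbox = [105.592, -7.809, 110.159, -5.647]
bbox_string = ','.join(['%.6f' % float(x) for x in bbox])
print(bbox_string)  # 105.592000,-7.809000,110.159000,-5.647000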
Example #3
    def interpolate(self, X, attribute_name=None):
        """Interpolate values of this raster layer to other layer

        Input
            X: Layer object defining target
            attribute_name: Optional name of interpolated layer.
                            If attribute_name is None,
                            the name of self is used.

        Output
            Y: Layer object with values of this raster layer interpolated to
               geometry of input layer X

        Note: If target geometry is polygon, data will be interpolated to
        its centroids and the output is a point data set.
        """

        if X.is_raster:
            if self.get_geotransform() != X.get_geotransform():
                # Need interpolation between grids
                msg = 'Intergrid interpolation not yet implemented'
                raise Exception(msg)
            else:
                # Rasters are aligned, no need to interpolate
                return self
        else:
            # Interpolate this raster layer to geometry of X
            msg = ('attribute_name must be either a string or None. '
                   'I got %s' % str(type(attribute_name))[1:-1])
            verify(attribute_name is None
                   or isinstance(attribute_name, basestring), msg)

            return interpolate_raster_vector(self, X,
                                             attribute_name=attribute_name)
Example #4
def convert_line_to_points(V, delta):
    """Convert line vector data to point vector data

    Input
        V: Vector layer with line data
        delta: Incremental step to find the points
    Output
        Vector layer with point data and the same attributes as V
    """

    msg = 'Input data %s must be line vector data' % V
    verify(V.is_line_data, msg)

    geometry = V.get_geometry()
    data = V.get_data()
    N = len(V)

    # Calculate points along each line
    points = []
    new_data = []
    for i in range(N):
        c = points_along_line(geometry[i], delta)
        # We need to create a data entry for each point.
        new_data.extend([data[i] for thing in c])
        points.extend(c)

    # Create new point vector layer with same attributes and return
    V = Vector(data=new_data,
               projection=V.get_projection(),
               geometry=points,
               name='%s_point_data' % V.get_name(),
               keywords=V.get_keywords())
    return V
Example #5
def bboxstring2list(bbox_string):
    """Convert bounding box string to list

    Input
        bbox_string: String of bounding box coordinates of the form 'W,S,E,N'
    Output
        bbox: List of floating point numbers with format [W, S, E, N]
    """

    msg = ('Bounding box must be a string with coordinates following the '
           'format 105.592,-7.809,110.159,-5.647\n'
           'Instead I got %s of type %s.' % (str(bbox_string),
                                             type(bbox_string)))
    verify(isinstance(bbox_string, basestring), msg)

    fields = bbox_string.split(',')
    msg = ('Bounding box string must have 4 coordinates in the form '
           '"W,S,E,N". I got bbox == "%s"' % bbox_string)
    verify(len(fields) == 4, msg)

    for x in fields:
        try:
            float(x)
        except ValueError, e:
            msg = ('Bounding box %s contained non-numeric entry %s, '
                   'original error was "%s".' % (bbox_string, x, e))
            raise AssertionError(msg)

    # Return as list of floats
    # (ending reconstructed from the docstring; not shown in this snippet)
    return [float(x) for x in fields]
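The conversion itself is one line once the string has been validated; a dependency-free sketch of what bboxstring2list yields:

bbox_string = '105.592,-7.809,110.159,-5.647'
print([float(x) for x in bbox_string.split(',')])
# [105.592, -7.809, 110.159, -5.647]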
Example #6
def dom2object(node):
    """Convert DOM representation to XML_object hierarchy.
    """

    value = []
    textnode_encountered = None
    for n in node.childNodes:

        if n.nodeType == 3:
            # Child is a text element - omit the dom tag #text and
            # go straight to the text value.

            # Note - only the last text value will be recorded

            msg = 'Text element has child nodes - this shouldn\'t happen'
            verify(len(n.childNodes) == 0, msg)


            x = n.nodeValue.strip()
            if len(x) == 0:
                # Skip empty text children
                continue

            textnode_encountered = value = x
        else:
            # XML element


            if textnode_encountered is not None:
                msg = ('A text node was followed by a non-text tag. '
                       'This is not allowed.\n')
                msg += 'Offending text node: "%s" ' % str(textnode_encountered)
                msg += 'was followed by node named: "<%s>"' % str(n.nodeName)
                raise Exception(msg)


            value.append(dom2object(n))


    # Deal with empty elements
    if len(value) == 0: value = ''


    if node.nodeType == 9:
        # Root node (document)
        tag = None
    else:
        # Normal XML node
        tag = node.nodeName


    X = XML_element(tag=tag,
                    value=value)

    return X
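The magic numbers 3 and 9 above are the DOM node type constants for text nodes and the document root. A small standard-library check (XML_element and verify come from this codebase and are not needed here):

from xml.dom import minidom

doc = minidom.parseString('<root><a>hello</a><b/></root>')
print(doc.nodeType == doc.DOCUMENT_NODE)   # True: nodeType 9, the root test above
text = doc.getElementsByTagName('a')[0].firstChild
print(text.nodeType == text.TEXT_NODE)     # True: nodeType 3, a text child
print(repr(text.nodeValue.strip()))        # 'hello'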
Example #7
def sigab2bnpb(E, target_attribute='VCLASS'):
    """Map SIGAB point data to BNPB vulnerability classes

    Input
        E: Vector object representing the OSM data
        target_attribute: Optional name of the attribute containing
                          the mapped vulnerability class. Default
                          value is 'VCLASS'

    Output:
        Vector object like E, but with one new attribute (e.g. 'VCLASS')
        representing the vulnerability class used in the guidelines
    """

    # Input check
    required = ['Struktur_B', 'Lantai', 'Atap', 'Dinding', 'Tingkat']
    actual = E.get_attribute_names()

    msg = ('Input data to sigab2bnpb must have attributes %s. '
           'It has %s' % (str(required), str(actual)))
    for attribute in required:
        verify(attribute in actual, msg)

    # Start mapping
    N = len(E)
    attributes = E.get_data()
    for i in range(N):
        levels = E.get_data('Tingkat', i).lower()
        structure = E.get_data('Struktur_B', i).lower()
        roof_type = E.get_data('Atap', i).lower()
        wall_type = E.get_data('Dinding', i).lower()
        floor_type = E.get_data('Lantai', i).lower()
        if levels == 'none' or structure == 'none':
            vulnerability_class = 'URM'
        elif structure.startswith('beton') or structure.startswith('kayu'):
            vulnerability_class = 'RM'
        else:
            if int(levels) >= 2:
                vulnerability_class = 'RM'
            else:
                vulnerability_class = 'URM'

        # Store new attribute value
        attributes[i][target_attribute] = vulnerability_class

    # Create new vector instance and return
    V = Vector(data=attributes,
               projection=E.get_projection(),
               geometry=E.get_geometry(),
               name=E.get_name() + ' mapped to BNPB vulnerability classes',
               keywords=E.get_keywords())
    return V
Example #8
    def get_geometry(self):
        """Return longitudes and latitudes (the axes) for grid.

        Return two vectors (longitudes and latitudes) corresponding to
        grid. The values are offset by half a pixel size to correspond to
        pixel registration.

        I.e. if the grid origin (top left corner) is (100, 10) and the
        resolution is 1 degree in each direction, then the vectors will
        take the form

        longitudes = [100.5, 101.5, ..., 109.5]
        latitudes = [0.5, 1.5, ..., 9.5]
        """

        # Get parameters for axes
        g = self.get_geotransform()

        lon_ul = float(g[0])  # Longitude of upper left corner
        lat_ul = float(g[3])  # Latitude of upper left corner
        dx = float(g[1])      # Longitudinal resolution
        dy = - float(g[5])    # Latitudinal resolution (always(?) negative)
        nx = self.columns
        ny = self.rows

        verify(dx > 0)
        verify(dy > 0)

        # Coordinates of lower left corner
        lon_ll = lon_ul
        lat_ll = lat_ul - ny * dy

        # Coordinates of upper right corner
        lon_ur = lon_ul + nx * dx

        # Define pixel centers along each direction
        # This is to achieve pixel registration rather
        # than gridline registration
        dx2 = dx / 2
        dy2 = dy / 2

        # Define longitudes and latitudes for each axis
        x = numpy.linspace(lon_ll + dx2,
                           lon_ur - dx2, nx)
        y = numpy.linspace(lat_ll + dy2,
                           lat_ul - dy2, ny)

        # Return
        return x, y
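A standalone sketch of the pixel-registration arithmetic above, using the 10 x 10 degree example from the docstring; the geotransform tuple follows the usual GDAL layout assumed by the code:

import numpy

g = (100.0, 1.0, 0.0, 10.0, 0.0, -1.0)   # (lon_ul, dx, 0, lat_ul, 0, -dy)
nx = ny = 10

lon_ul, dx, lat_ul, dy = g[0], g[1], g[3], -g[5]
lon_ll, lat_ll = lon_ul, lat_ul - ny * dy
lon_ur = lon_ul + nx * dx

# Pixel centres: first and last values sit half a cell inside the grid edges
x = numpy.linspace(lon_ll + dx / 2, lon_ur - dx / 2, nx)
y = numpy.linspace(lat_ll + dy / 2, lat_ul - dy / 2, ny)
print(x)   # [100.5 101.5 ... 109.5]
print(y)   # [0.5 1.5 ... 9.5]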
Example #9
    def write_to_file(self, filename):
        """Save raster data to file

        Input
            filename: filename with extension .tif
        """

        # Check file format
        basename, extension = os.path.splitext(filename)

        msg = ('Invalid file type for file %s. Only extensions '
               'tif and asc allowed.' % filename)
        verify(extension in ['.tif', '.asc'], msg)
        format = DRIVER_MAP[extension]

        # Get raster data
        A = self.get_data()

        # Get Dimensions. Note numpy and Gdal swap order
        N, M = A.shape

        # Create empty file.
        # FIXME (Ole): It appears that this is created as single
        #              precision even though Float64 is specified
        #              - see issue #17
        driver = gdal.GetDriverByName(format)
        fid = driver.Create(filename, M, N, 1, gdal.GDT_Float64)
        if fid is None:
            msg = ('Gdal could not create filename %s using '
                   'format %s' % (filename, format))
            raise Exception(msg)

        # Write metadata
        fid.SetProjection(str(self.projection))
        fid.SetGeoTransform(self.geotransform)

        # Write data
        fid.GetRasterBand(1).WriteArray(A)

        # Write keywords if any
        write_keywords(self.keywords, basename + '.keywords')
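DRIVER_MAP is defined elsewhere in the library. Presumably it maps the two accepted extensions to GDAL driver names along these lines (an assumption for illustration, not the library's actual definition):

DRIVER_MAP = {'.tif': 'GTiff',    # GeoTIFF
              '.asc': 'AAIGrid'}  # Arc/Info ASCII grid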
Example #10
def interpolate_raster_vector(R, V, attribute_name=None):
    """Interpolate from raster layer to vector data

    Input
        R: Raster data set (grid)
        V: Vector data set (points or polygons)
        attribute_name: Name for new attribute.
              If None (default) the name of R is used

    Output
        I: Vector data set; points located as V with values interpolated from R

    Note: If target geometry is polygon, data will be interpolated to
    its centroids and the output is a point data set.
    """

    # Input checks
    verify(R.is_raster)
    verify(V.is_vector)

    if V.is_polygon_data:
        # Use centroids, in case of polygons
        P = convert_polygons_to_centroids(V)
    else:
        P = V

    # Interpolate from raster to point data
    R = interpolate_raster_vector_points(R, P,
                                         attribute_name=attribute_name)
    if V.is_polygon_data:
        # In case of polygon data, restore the polygon geometry
        # Do this by setting the geometry of the returned set to
        # that of the original polygon
        R = Vector(data=R.get_data(),
                   projection=R.get_projection(),
                   geometry=V.get_geometry(),
                   name=R.get_name())

    # Return interpolated vector layer
    return R
Example #11
    def get_bins(self, N=10, quantiles=False):
        """Get N values between the min and the max occurred in this dataset.

        Return sorted list of length N+1 where the first element is min and
        the last is max. Intermediate values depend on the keyword quantiles:
        If quantiles is True, they represent boundaries between quantiles.
        If quantiles is False, they represent equidistant interval boundaries.
        """

        min, max = self.get_extrema()

        levels = []
        if quantiles is False:
            # Linear intervals
            d = (max - min) / N

            for i in range(N):
                levels.append(min + i * d)
        else:
            # Quantiles
            # FIXME (Ole): Not 100% sure about this algorithm,
            # but it is close enough

            A = self.get_data(nan=True).flat[:]

            mask = numpy.logical_not(numpy.isnan(A))  # Omit NaN's
            A = A.compress(mask)

            A.sort()

            verify(len(A) == A.shape[0])

            d = float(len(A) + 0.5) / N
            for i in range(N):
                levels.append(A[int(i * d)])

        levels.append(max)

        return levels
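A self-contained sketch of the two binning modes above, using a small sample array instead of raster data:

import numpy

A = numpy.array([1.0, 3.0, 3.5, 4.0, 7.0, 9.0, 10.0, 15.0, 18.0, 20.0])
N = 4
lo, hi = A.min(), A.max()

# quantiles=False: equidistant interval boundaries
print([lo + i * (hi - lo) / N for i in range(N)] + [hi])
# [1.0, 5.75, 10.5, 15.25, 20.0]

# quantiles=True: boundaries taken from the sorted data, same indexing as above
A = numpy.sort(A)
d = float(len(A) + 0.5) / N
print([A[int(i * d)] for i in range(N)] + [hi])
# [1.0, 3.5, 9.0, 15.0, 20.0]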
Example #12
    def get_data(self, attribute=None, index=None, copy=False):
        """Get vector attributes

        Data is returned as a list where each entry is a dictionary of
        attributes for one feature. Entries in get_geometry() and
        get_data() are related as 1-to-1

        If optional argument attribute is specified and a valid name,
        then the list of values for that attribute is returned.

        If optional argument index is specified, only the value for that
        feature is returned. Any value of index is ignored if attribute is None.

        If optional argument copy is True and all attributes are requested,
        a copy will be returned. Otherwise a pointer to the data is returned.
        """

        if hasattr(self, 'data'):
            if attribute is None:
                if copy:
                    return copy_module.deepcopy(self.data)
                else:
                    return self.data
            else:
                msg = ('Specified attribute %s does not exist in '
                       'vector layer %s. Valid names are %s'
                       '' % (attribute, self, self.data[0].keys()))
                verify(attribute in self.data[0], msg)

                if index is None:
                    # Return all values for specified attribute
                    return [x[attribute] for x in self.data]
                else:
                    # Return value for specified attribute and index
                    msg = ('Specified index must be either None or '
                           'an integer. I got %s' % index)
                    verify(type(index) == type(0), msg)

                    msg = ('Specified index must lie within the bounds '
                           'of vector layer %s which is [%i, %i]'
                           '' % (self, 0, len(self) - 1))
                    verify(0 <= index < len(self), msg)

                    return self.data[index][attribute]
        else:
            msg = 'Vector data instance does not have any attributes'
            raise Exception(msg)
Example #13
    def get_topN(self, attribute, N=10):
        """Get top N features

        Input
            attribute: The name of attribute where values are sought
            N: Number of features to return (default 10)

        Output
            layer: New vector layer with selected features
        """

        # FIXME (Ole): Maybe generalise this to arbitrary expressions

        # Input checks
        msg = ('Specified attribute must be a string. '
               'I got %s' % (type(attribute)))
        verify(isinstance(attribute, basestring), msg)

        msg = 'Specified attribute was empty'
        verify(attribute != '', msg)

        msg = 'N must be a positive number. I got %i' % N
        verify(N > 0, msg)

        # Create list of values for specified attribute
        values = self.get_data(attribute)

        # Sort and select using Schwartzian transform
        A = zip(values, self.data, self.geometry)
        A.sort()

        # Pick top N and unpack
        _, data, geometry = zip(*A[-N:])

        # Create new Vector instance and return
        return Vector(data=data,
                      projection=self.get_projection(),
                      geometry=geometry)
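The 'Schwartzian transform' used above is simply sorting tuples so that attribute values, data and geometry stay together; a minimal standalone illustration with toy values:

values = [7, 2, 9, 4]
data = ['a', 'b', 'c', 'd']
geometry = [(0, 0), (1, 1), (2, 2), (3, 3)]

A = sorted(zip(values, data, geometry))   # Sort features by attribute value
_, top_data, top_geometry = zip(*A[-2:])  # Keep the top N=2
print(top_data)      # ('a', 'c')
print(top_geometry)  # ((0, 0), (2, 2))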
Example #14
    def __init__(self, name='', projection=None,
                 keywords=None, style_info=None):
        """Common constructor for all types of layers

        See docstrings for class Raster and class Vector for details.
        """

        # Name
        msg = ('Specified name must be a string. '
               'I got %s with type %s' % (name, str(type(name))[1:-1]))
        verify(isinstance(name, basestring), msg)
        self.name = name

        # Projection
        self.projection = Projection(projection)

        # Keywords
        if keywords is None:
            self.keywords = {}
        else:
            msg = ('Specified keywords must be either None or a '
                   'dictionary. I got %s' % keywords)
            verify(isinstance(keywords, dict), msg)
            self.keywords = keywords

        # Style info
        if style_info is None:
            self.style_info = {}
        else:
            msg = ('Specified style_info must be either None or a '
                   'dictionary. I got %s' % style_info)
            verify(isinstance(style_info, dict), msg)
            self.style_info = style_info

        # Defaults
        self.filename = None
        self.data = None
Example #15
    def get_data(self, nan=True, scaling=None, copy=False):
        """Get raster data as numeric array

        Input
            nan: Optional flag controlling handling of missing values.
                 If nan is True (default), nodata values will be replaced
                 with numpy.nan
                 If keyword nan has a numeric value, nodata values will
                 be replaced by that value. E.g. to set missing values to 0,
                 do get_data(nan=0.0)
            scaling: Optional flag controlling if data is to be scaled
                     if it has been resampled. Admissible values are
                     False: data is retrieved without modification.
                     True: Data is rescaled based on the squared ratio between
                           its current and native resolution. This is typically
                           required if raster data represents a density
                           such as population per km^2
                     None: The behaviour will depend on the keyword
                           "datatype" associated with the layer. If
                           it is "density", scaling will be applied
                           otherwise not. This is the default.
                     scalar value: If scaling takes a numerical scalar value,
                                   that will be used to scale the data
            copy (optional): If True, return a copy of the data.
                             Otherwise a reference is returned (default).

        NOTE: Scaling does not currently work with projected layers.
        See issue #123
        """

        if hasattr(self, 'data') and self.data is not None:
            if copy:
                A = copy_module.deepcopy(self.data)
            else:
                A = self.data
            verify(A.shape[0] == self.rows and A.shape[1] == self.columns)
        else:
            # Read from raster file
            A = self.band.ReadAsArray()

            # Convert to double precision (issue #75)
            A = numpy.array(A, dtype=numpy.float64)

            # Self check
            M, N = A.shape
            msg = ('Dimensions of raster array do not match those of '
                   'raster file %s' % self.filename)
            verify(M == self.rows, msg)
            verify(N == self.columns, msg)

        # Handle no data value
        if nan is False:
            pass
        else:
            if nan is True:
                NAN = numpy.nan
            else:
                NAN = nan

            # Replace NODATA_VALUE with NaN
            nodata = self.get_nodata_value()
            NaN = numpy.ones(A.shape, A.dtype) * NAN
            A = numpy.where(A == nodata, NaN, A)

        # Take care of possible scaling
        if scaling is None:
            # Redefine scaling from density keyword if possible
            kw = self.get_keywords()
            if 'datatype' in kw and kw['datatype'].lower() == 'density':
                scaling = True
            else:
                scaling = False

        if scaling is False:
            # No change
            sigma = 1
        elif scaling is True:
            # Calculate scaling based on resolution change

            actual_res = self.get_resolution(isotropic=True)
            native_res = self.get_resolution(isotropic=True, native=True)
            #print
            #print 'Actual res', actual_res
            #print 'Native res', native_res
            sigma = (actual_res / native_res) ** 2
            #print 'Scaling', sigma
        else:
            # See if scaling can work as a scalar value
            try:
                sigma = float(scaling)
            except Exception, e:
                msg = ('Keyword scaling "%s" could not be converted to a '
                       'number. It must be either True, False, None or a '
                       'number: %s' % (scaling, str(e)))
                raise Exception(msg)

        # Apply scaling and return
        # (ending reconstructed from the docstring; not shown in this snippet)
        return sigma * A
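The density scaling applied in the True branch is the squared resolution ratio; a worked example of the arithmetic:

# Hypothetical resolutions in degrees: native grid resampled to twice the cell size
native_res = 0.0008
actual_res = 0.0016
sigma = (actual_res / native_res) ** 2
print(sigma)   # 4.0 -- each resampled cell covers the area of four native cells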
Example #16
def check_data_integrity(layer_objects):
    """Check list of layer objects

    Input
        layer_objects: List of InaSAFE layer instances

    Output
        Nothing

    Raises
        Exceptions for a range of errors

    This function checks that
    * Layers have correct keywords
    * That they have the same georeferences
    """

    # Link to documentation
    manpage = ('http://risiko_dev.readthedocs.org/en/latest/usage/'
               'plugins/development.html')
    instructions = ('Please add keywords as <keyword>:<value> pairs '
                    ' in the .keywords file. For more information '
                    'please read the sections on impact functions '
                    'and keywords in the manual: %s' % manpage)

    # Set default values for projection and geotransform.
    # Enforce DEFAULT (WGS84).
    # Choosing 'None' will use value of first layer.
    reference_projection = Projection(DEFAULT_PROJECTION)
    geotransform = None
    coordinates = None

    for layer in layer_objects:

        # Check that critical keywords exist and are non empty
        keywords = layer.get_keywords()
        for kw in REQUIRED_KEYWORDS:
            msg = ('Layer %s did not have required keyword "%s". '
                   '%s' % (layer.name, kw, instructions))
            verify(kw in keywords, msg)

            val = keywords[kw]
            msg = ('No value found for keyword "%s" in layer %s. '
                   '%s' % (kw, layer.name, instructions))
            verify(val, msg)

        # Ensure that projection is consistent across all layers
        if reference_projection is None:
            reference_projection = layer.projection
        else:
            msg = ('Projection in input layer %s is not as expected:\n'
                   'projection: %s\n'
                   'default:    %s'
                   '' % (layer, layer.projection, reference_projection))
            verify(reference_projection == layer.projection, msg)

        # Ensure that geotransform and dimensions are consistent across
        # all *raster* layers
        if layer.is_raster:
            if geotransform is None:
                geotransform = layer.get_geotransform()
            else:
                msg = ('Geotransforms in input raster layers are different: '
                       '%s %s' % (geotransform, layer.get_geotransform()))
                verify(numpy.allclose(geotransform,
                                      layer.get_geotransform(),
                                      rtol=1.0e-12), msg)

        # In case of vector layers, we just check that they are non-empty
        # FIXME (Ole): Not good as nasty error is raised in cases where
        # there are no buildings in the hazard area. Need to be more graceful
        # See e.g. shakemap dated 20120227190230
        if layer.is_vector:
            msg = ('There are no vector data features. '
                   'Perhaps zoom out or pan to the study area '
                   'and try again')
            verify(len(layer) > 0, msg)

    # Check that arrays are aligned.

    refname = None
    for layer in layer_objects:
        if layer.is_raster:

            if refname is None:
                refname = layer.get_name()
                M = layer.rows
                N = layer.columns

            msg = ('Rasters are not aligned!\n'
                   'Raster %s has %i rows but raster %s has %i rows\n'
                   'Refer to issue #102' % (layer.get_name(),
                                            layer.rows,
                                            refname, M))
            verify(layer.rows == M, msg)

            msg = ('Rasters are not aligned!\n'
                   'Raster %s has %i columns but raster %s has %i columns\n'
                   'Refer to issue #102' % (layer.get_name(),
                                            layer.columns,
                                            refname, N))
            verify(layer.columns == N, msg)
Example #17
            raise Exception(msg)

        if native:
            keywords = self.get_keywords()
            if 'resolution' in keywords:

                resolution = keywords['resolution']
                try:
                    res = float(resolution)
                except:
                    # Assume resolution is a string of the form:
                    # (0.00045228819716044, 0.00045228819716044)

                    msg = ('Unknown format for resolution keyword: %s'
                           % resolution)
                    verify((resolution.startswith('(') and
                            resolution.endswith(')')), msg)

                    dx, dy = [float(s) for s in resolution[1:-1].split(',')]
                    if not isotropic:
                        res = (dx, dy)
                    else:
                        msg = ('Resolution of layer "%s" was not isotropic: '
                               '[dx, dy] == %s' % (self.get_name(), res))
                        verify(numpy.allclose(dx, dy,
                                              rtol=1.0e-12, atol=1.0e-12), msg)
                        res = dx
                else:
                    if not isotropic:
                        res = (res, res)

        # Return either 2-tuple or scale depending on isotropic
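The string format handled in the except branch can be exercised on its own:

resolution = '(0.00045228819716044, 0.00045228819716044)'
dx, dy = [float(s) for s in resolution[1:-1].split(',')]
print(dx == dy)   # True -- isotropic, so a single scalar resolution is valid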
Example #18
def calculate_impact(layers, impact_fcn,
                     comment=''):
    """Calculate impact levels as a function of list of input layers

    Input
        layers: List of Raster and Vector layer objects to be used for analysis

        impact_fcn: Plugin (impact function class); an instance of it is
                    created and its run(layers) method called
        comment: Optional free-form comment to be embedded as metadata

    Output
        Layer object with resulting impact data, which is also written to
        file. Comment is embedded as metadata and the filename is generated
        from input data and date.

    Note
        The admissible file types are tif and asc/prj for raster and
        gml or shp for vector data

    Assumptions
        1. All layers are in WGS84 geographic coordinates
        2. Layers are equipped with metadata such as names and categories
    """

    # Input checks
    check_data_integrity(layers)

    # Get an instance of the passed impact_fcn
    impact_function = impact_fcn()

    # Pass input layers to plugin
    F = impact_function.run(layers)

    msg = 'Impact function %s returned None' % str(impact_function)
    verify(F is not None, msg)

    # Write result and return filename
    if F.is_raster:
        extension = '.tif'
        # use default style for raster
    else:
        extension = '.shp'
        # use default style for vector

    output_filename = unique_filename(suffix=extension)
    F.filename = output_filename
    F.write_to_file(output_filename)

    # Establish default name (layer1 X layer2 X ... X impact_function)
    if not F.get_name():
        default_name = ''
        for layer in layers:
            default_name += layer.name + ' X '

        if hasattr(impact_function, 'plugin_name'):
            default_name += impact_function.plugin_name
        else:
            # Strip trailing 'X'
            default_name = default_name[:-2]

        F.set_name(default_name)

    # FIXME (Ole): If we need to save style as defined by the impact_function
    #              this is the place

    # Return layer object
    return F
Example #19
def interpolate_raster_vector_points(R, V, attribute_name=None):
    """Interpolate from raster layer to point data

    Input
        R: Raster data set (grid)
        V: Vector data set (points)
        attribute_name: Name for new attribute.
              If None (default) the name of layer R is used

    Output
        I: Vector data set; points located as V with values interpolated from R

    """

    msg = ('There are no data points to interpolate to. Perhaps zoom out '
           'and try again')
    verify(len(V) > 0, msg)

    # Input checks
    verify(R.is_raster)
    verify(V.is_vector)
    verify(V.is_point_data)

    # Get raster data and corresponding x and y axes
    A = R.get_data(nan=True)
    longitudes, latitudes = R.get_geometry()
    verify(len(longitudes) == A.shape[1])
    verify(len(latitudes) == A.shape[0])

    # Get vector point geometry as Nx2 array
    coordinates = numpy.array(V.get_geometry(),
                              dtype='d',
                              copy=False)
    # Get original attributes
    attributes = V.get_data()

    # Create new attribute and interpolate
    N = len(V)
    if attribute_name is None:
        attribute_name = R.get_name()

    try:
        values = interpolate_raster(longitudes, latitudes, A,
                                    coordinates, mode='linear')
    except Exception, e:
        msg = (_('Could not interpolate from raster layer %(raster)s to '
                 'vector layer %(vector)s. Error message: %(error)s')
               % {'raster': R.get_name(),
                  'vector': V.get_name(),
                  'error': str(e)})
        raise Exception(msg)

    # Add interpolated values as new attribute and return point layer
    # (ending reconstructed from the docstring; not shown in this snippet)
    for i in range(N):
        attributes[i][attribute_name] = values[i]

    return Vector(data=attributes,
                  projection=V.get_projection(),
                  geometry=coordinates,
                  name=attribute_name)
Example #20
def check_bbox_string(bbox_string):
    """Check that bbox string is valid
    """

    msg = 'Expected bbox as a string with format "W,S,E,N"'
    verify(isinstance(bbox_string, basestring), msg)

    # Use checks from string to list conversion
    # FIXME (Ole): Would be better to separate the checks from the conversion
    # and use those checks directly.
    minx, miny, maxx, maxy = bboxstring2list(bbox_string)

    # Check semantic integrity
    msg = ('Western border %.5f of bounding box %s was out of range '
           'for longitudes ([-180:180])' % (minx, bbox_string))
    verify(-180 <= minx <= 180, msg)

    msg = ('Eastern border %.5f of bounding box %s was out of range '
           'for longitudes ([-180:180])' % (maxx, bbox_string))
    verify(-180 <= maxx <= 180, msg)

    msg = ('Southern border %.5f of bounding box %s was out of range '
           'for latitudes ([-90:90])' % (miny, bbox_string))
    verify(-90 <= miny <= 90, msg)

    msg = ('Northern border %.5f of bounding box %s was out of range '
           'for latitudes ([-90:90])' % (maxy, bbox_string))
    verify(-90 <= maxy <= 90, msg)

    msg = ('Western border %.5f was greater than or equal to eastern border '
           '%.5f of bounding box %s' % (minx, maxx, bbox_string))
    verify(minx < maxx, msg)

    msg = ('Southern border %.5f was greater than or equal to northern border '
           '%.5f of bounding box %s' % (miny, maxy, bbox_string))
    verify(miny < maxy, msg)
Example #21
    def run(self, layers):
        """Risk plugin for Padang building survey
        """

        # Extract data
        H = get_hazard_layer(layers)    # Ground shaking
        E = get_exposure_layer(layers)  # Building locations

        datatype = E.get_keywords()['datatype']
        vclass_tag = 'ITB_Class'
        if datatype.lower() == 'osm':
            # Map from OSM attributes to the ITB building classes
#            Emap = osm2itb(E)
            print 'osm2itb has not been implemented'
        elif datatype.lower() == 'sigab':
#            Emap = sigabitb(E)
            print 'sigab2itb has not been implemented'
        elif datatype.lower() == 'itb':
            Emap = E

        # Interpolate hazard level to building locations
        Hi = H.interpolate(Emap, attribute_name='MMI')

        # Extract relevant numerical data
        coordinates = Emap.get_geometry()
        shaking = Hi.get_data()
        N = len(shaking)

        # List attributes to carry forward to result layer
        attributes = Emap.get_attribute_names()
        # Calculate building damage
        count50 = 0
        count25 = 0
        count10 = 0
        count0 = 0
        building_damage = []
        for i in range(N):
            mmi = float(shaking[i]['MMI'])

            building_class = Emap.get_data(vclass_tag, i)

            building_type = str(building_class)
            damage_params = vul_curves[building_type]
            beta = damage_params['beta']
            median = damage_params['median']

            msg = 'Invalid parameter value for ' + building_type
            verify(beta + median > 0.0, msg)
            percent_damage = lognormal_cdf(mmi,
                                           median=median,
                                           sigma=beta) * 100

            # Collect shake level and calculated damage
            result_dict = {self.target_field: percent_damage,
                           'MMI': mmi}

            # Carry all original attributes forward
            for key in attributes:
                result_dict[key] = Emap.get_data(key, i)

            # Record result for this feature
            building_damage.append(result_dict)

            # Debugging
            #if percent_damage > 0.01:
            #    print mmi, percent_damage

            # Calculate statistics
            if percent_damage < 10:
                count0 += 1

            if 10 <= percent_damage < 33:
                count10 += 1

            if 33 <= percent_damage < 66:
                count25 += 1

            if 66 <= percent_damage:
                count50 += 1

#        fid.close()
        # Create report
        Hname = H.get_name()
        Ename = E.get_name()
        impact_summary = ('<b>In case of "%s" the estimated impact to '
                           '"%s" '
                           'is&#58;</b><br><br><p>' % (Hname, Ename))
        impact_summary += ('<table border="0" width="320px">'
                   '   <tr><th><b>%s</b></th><th><b>%s</b></th></tr>'
                    '   <tr></tr>'
                    '   <tr><td>%s&#58;</td><td>%i</td></tr>'
                    '   <tr><td>%s (<10%%)&#58;</td><td>%i</td></tr>'
                    '   <tr><td>%s (10-33%%)&#58;</td><td>%i</td></tr>'
                    '   <tr><td>%s (33-66%%)&#58;</td><td>%i</td></tr>'
                    '   <tr><td>%s (66-100%%)&#58;</td><td>%i</td></tr>'
                    '</table></font>' % (_('Buildings'), _('Total'),
                                  _('All'), N,
                                  _('No damage'), count0,
                                  _('Low damage'), count10,
                                  _('Medium damage'), count25,
                                  _('High damage'), count50))
        impact_summary += '<br>'  # Blank separation row
        impact_summary += '<b>' + _('Assumption') + '&#58;</b><br>'
        # This is the proper text:
        #_('Levels of impact are defined by post 2009 '
        #  'Padang earthquake survey conducted by Geoscience '
        #  'Australia and Institute of Teknologi Bandung.'))
        #_('Unreinforced masonry is assumed where no '
        #  'structural information is available.'))
        impact_summary += _('Levels of impact are defined by post 2009 '
                            'Padang earthquake survey conducted by Geoscience '
                            'Australia and Institute of Teknologi Bandung.')
        impact_summary += _('Unreinforced masonry is assumed where no '
                            'structural information is available.')
        # Create style
        style_classes = [dict(label=_('No damage'), min=0, max=10,
                              colour='#00ff00', transparency=1),
                         dict(label=_('Low damage'), min=10, max=33,
                              colour='#ffff00', transparency=1),
                         dict(label=_('Medium damage'), min=33, max=66,
                              colour='#ffaa00', transparency=1),
                         dict(label=_('High damage'), min=66, max=100,
                              colour='#ff0000', transparency=1)]
        style_info = dict(target_field=self.target_field,
                          style_classes=style_classes)

        # Create vector layer and return
        V = Vector(data=building_damage,
                   projection=E.get_projection(),
                   geometry=coordinates,
                   name='Estimated pct damage',
                   keywords={'impact_summary': impact_summary},
                   style_info=style_info)
        return V
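lognormal_cdf is provided by the library and is not shown here. A plausible stand-in, assuming the usual median/sigma parameterisation of the lognormal CDF (the library's actual implementation may differ):

from math import erf, log, sqrt

def lognormal_cdf_sketch(x, median=1.0, sigma=1.0):
    # P(X <= x) for X lognormal with the given median and log-space sigma
    return 0.5 * (1.0 + erf(log(x / median) / (sigma * sqrt(2.0))))

# At the median shaking level the modelled damage is 50%
print(lognormal_cdf_sketch(7.5, median=7.5, sigma=0.3) * 100)   # 50.0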
Example #22
    def __init__(self, data=None, projection=None, geometry=None,
                 geometry_type=None,
                 name='', keywords=None, style_info=None):
        """Initialise object with either geometry or filename

        Input
            data: Can be either
                * a filename of a vector file format known to GDAL
                * List of dictionaries of fields associated with
                  point coordinates
                * None
            projection: Geospatial reference in WKT format.
                        Only used if geometry is provided as a numeric array,
                        if None, WGS84 geographic is assumed
            geometry: A list of either point coordinates or polygons/lines
                      (see note below)
            geometry_type: Desired interpretation of geometry.
                           Valid options are 'point', 'line', 'polygon' or
                           the ogr types: 1, 2, 3
                           If None, a geometry_type will be inferred
            name: Optional name for layer.
                  Only used if geometry is provided as a numeric array
            keywords: Optional dictionary with keywords that describe the
                      layer. When the layer is stored, these keywords will
                      be written into an associated file with extension
                      .keywords.

                      Keywords can for example be used to display text
                      about the layer in a web application.
            style_info: Dictionary with information about how this layer
                        should be styled. See impact_functions/styles.py
                        for examples.

        Notes

        If data is a filename, all other arguments are ignored
        as they will be inferred from the file.

        The geometry type will be inferred from the dimensions of geometry.
        If each entry is one set of coordinates the type will be ogr.wkbPoint,
        if it is an array of coordinates the type will be ogr.wkbPolygon.

        Each polygon or line feature takes the form of an Nx2 array
        representing vertices where line segments are joined
        """

        # Invoke common layer constructor
        Layer.__init__(self,
                       name=name,
                       projection=projection,
                       keywords=keywords,
                       style_info=style_info)

        # Input checks
        if data is None and geometry is None:
            # Instantiate empty object
            self.geometry_type = None
            self.extent = [0, 0, 0, 0]
            return

        if isinstance(data, basestring):
            self.read_from_file(data)
        else:
            # Assume that data is provided as sequences provided as
            # arguments to the Vector constructor
            # with extra keyword arguments supplying metadata

            msg = 'Geometry must be specified'
            verify(geometry is not None, msg)

            msg = 'Geometry must be a sequence'
            verify(is_sequence(geometry), msg)
            self.geometry = geometry

            self.geometry_type = get_geometry_type(geometry, geometry_type)

            if data is None:
                # Generate default attribute as OGR will do that anyway
                # when writing
                data = []
                for i in range(len(geometry)):
                    data.append({'ID': i})

            # Check data
            self.data = data
            if data is not None:
                msg = 'Data must be a sequence'
                verify(is_sequence(data), msg)

                msg = ('The number of entries in geometry and data '
                       'must be the same')
                verify(len(geometry) == len(data), msg)
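For illustration, the in-memory construction path described in the docstring might look like this. It is a sketch only: the attribute names and coordinates are made up, and the Vector class above is assumed to be importable.

data = [{'name': 'A', 'population': 1000},
        {'name': 'B', 'population': 2500}]
geometry = [(106.8, -6.2), (107.6, -6.9)]   # One (lon, lat) pair per feature

# geometry_type is inferred as point data; projection defaults to WGS84
V = Vector(data=data, geometry=geometry, name='example points')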
Example #23
    def interpolate(self, X, name=None, attribute_name=None):
        """Interpolate values of this vector layer to other layer

        Input
            X: Layer object defining target
            name: Optional name of returned interpolated layer
            attribute_name: Optional attribute name to use.
                            If None, all attributes are used.

                            FIXME (Ole): Single attribute not tested well yet
                            and not implemented for lines

        Output
            Y: Layer object with values of this vector layer interpolated to
               geometry of input layer X
        """

        msg = 'Input to Vector.interpolate must be a vector layer instance'
        verify(X.is_vector, msg)

        msg = ('Projections must be the same: I got %s and %s'
               % (self.projection, X.projection))
        verify(self.projection == X.projection, msg)

        msg = ('Vector layer to interpolate from must be polygon geometry. '
               'I got OGR geometry type %s'
               % geometrytype2string(self.geometry_type))
        verify(self.is_polygon_data, msg)

        # FIXME (Ole): Organise this the same way it is done with rasters
        original_geometry = X.get_geometry()  # Geometry for returned data
        if X.is_polygon_data:
            # Use centroids, in case of polygons
            X = convert_polygons_to_centroids(X)
        elif X.is_line_data:

            # Clip lines to polygon and return centroids

            # FIXME (Ole): Need to separate this out, but identify what is
            #              common with points and lines
            #

            #X.write_to_file('line_data.shp')
            #self.write_to_file('poly_data.shp')

            # Extract line features
            lines = X.get_geometry()
            line_attributes = X.get_data()
            N = len(X)
            verify(len(lines) == N)
            verify(len(line_attributes) == N)

            # Extract polygon features
            polygons = self.get_geometry()
            poly_attributes = self.get_data()
            verify(len(polygons) == len(poly_attributes))

            # Data structure for resulting line segments
            clipped_geometry = []
            clipped_attributes = []

            # Clip lines to polygons
            for i, polygon in enumerate(polygons):
                for j, line in enumerate(lines):
                    inside, outside = clip_line_by_polygon(line, polygon)

                    # Create new attributes
                    # FIXME (Ole): Single specified polygon attribute
                    #              is not yet handled here
                    inside_attributes = {}
                    outside_attributes = {}
                    for key in line_attributes[j]:
                        inside_attributes[key] = line_attributes[j][key]
                        outside_attributes[key] = line_attributes[j][key]

                    for key in poly_attributes[i]:
                        inside_attributes[key] = poly_attributes[i][key]
                        outside_attributes[key] = None

                    # Always create default attribute flagging if segment was
                    # inside any of the polygons
                    inside_attributes[DEFAULT_ATTRIBUTE] = True
                    outside_attributes[DEFAULT_ATTRIBUTE] = False

                    # Assign new attribute set to clipped lines
                    for segment in inside:
                        clipped_geometry.append(segment)
                        clipped_attributes.append(inside_attributes)

                    for segment in outside:
                        clipped_geometry.append(segment)
                        clipped_attributes.append(outside_attributes)

            # Create new Vector instance and return
            V = Vector(data=clipped_attributes,
                       projection=X.get_projection(),
                       geometry=clipped_geometry,
                       geometry_type='line')
            #V.write_to_file('clipped_and_tagged.shp')
            return V

        # The following applies only to Polygon-Point interpolation
        msg = ('Vector layer to interpolate to must be point geometry. '
               'I got OGR geometry type %s'
               % geometrytype2string(X.geometry_type))
        verify(X.is_point_data, msg)

        msg = ('Name must be either a string or None. I got %s'
               % str(type(name))[1:-1])
        verify(name is None or isinstance(name, basestring), msg)

        msg = ('Attribute must be either a string or None. I got %s'
               % str(type(attribute_name))[1:-1])
        verify(attribute_name is None or
               isinstance(attribute_name, basestring), msg)

        attribute_names = self.get_attribute_names()
        if attribute_name is not None:
            msg = ('Requested attribute "%s" did not exist in %s'
                   % (attribute_name, attribute_names))
            verify(attribute_name in attribute_names, msg)

        #----------------
        # Start algorithm
        #----------------

        # Extract point features
        points = ensure_numeric(X.get_geometry())
        attributes = X.get_data()
        N = len(X)

        # Extract polygon features
        geom = self.get_geometry()
        data = self.get_data()
        verify(len(geom) == len(data))

        # Augment point features with empty attributes from polygon
        for a in attributes:
            if attribute_name is None:
                # Use all attributes
                for key in attribute_names:
                    a[key] = None
            else:
                # Use only requested attribute
                # FIXME (Ole): Test for this is not finished
                a[attribute_name] = None

            # Always create default attribute flagging if point was
            # inside any of the polygons
            a[DEFAULT_ATTRIBUTE] = None

        # Traverse polygons and assign attributes to points that fall inside
        for i, polygon in enumerate(geom):
            if attribute_name is None:
                # Use all attributes
                poly_attr = data[i]
            else:
                # Use only requested attribute
                poly_attr = {attribute_name: data[i][attribute_name]}

            # Assign default attribute to indicate points inside
            poly_attr[DEFAULT_ATTRIBUTE] = True

            # Clip data points by polygons and add polygon attributes
            indices = inside_polygon(points, polygon)
            for k in indices:
                for key in poly_attr:
                    # Assign attributes from polygon to points
                    attributes[k][key] = poly_attr[key]

        # Create new Vector instance and return
        V = Vector(data=attributes,
                   projection=X.get_projection(),
                   geometry=original_geometry,
                   name=X.get_name())
        return V
Example #24
def sigab2padang(E):
    """Map SIGAB attributes to Padang vulnerability classes

    Input
        E: Vector object representing the SIGAB data

    Output:
        Vector object like E, but with one new attribute ('VCLASS')
        representing the vulnerability class used in the Padang dataset

    """

    # Input check
    required = ['Struktur_B', 'Lantai', 'Atap', 'Dinding', 'Tingkat']
    actual = E.get_attribute_names()

    msg = ('Input data to sigab2padang must have attributes %s. '
           'It has %s' % (str(required), str(actual)))
    for attribute in required:
        verify(attribute in actual, msg)

    # Start mapping
    N = len(E)
    attributes = E.get_data()
    for i in range(N):
        levels = E.get_data('Tingkat', i).lower()
        structure = E.get_data('Struktur_B', i).lower()
        roof_type = E.get_data('Atap', i).lower()
        wall_type = E.get_data('Dinding', i).lower()
        floor_type = E.get_data('Lantai', i).lower()
        if levels == 'none' or structure == 'none':
            vulnerability_class = 2
        else:
            if int(levels) >= 2:
                vulnerability_class = 7  # RC low
            else:
                # Low
                if structure in ['beton bertulang']:
                    vulnerability_class = 6  # Concrete shear
                elif structure.startswith('rangka'):
                    vulnerability_class = 8  # Confined
                elif 'kayu' in structure or 'wood' in structure:
                    vulnerability_class = 9  # Wood
                else:
                    vulnerability_class = 2  # URM

        # Store new attribute value
        attributes[i]['VCLASS'] = vulnerability_class

        # Selfcheck for use with osm_080811.shp
        if E.get_name() == 'osm_080811':
            if levels > 0:
                msg = ('Got %s expected %s. levels = %f, structure = %s'
                       % (vulnerability_class,
                          attributes[i]['TestBLDGCl'],
                          levels,
                          structure))
                verify(numpy.allclose(attributes[i]['TestBLDGCl'],
                                      vulnerability_class), msg)

    # Create new vector instance and return
    V = Vector(data=attributes,
               projection=E.get_projection(),
               geometry=E.get_geometry(),
               name=E.get_name() + ' mapped to Padang vulnerability classes',
               keywords=E.get_keywords())
    return V
Example #25
def osm2bnpb(E, target_attribute='VCLASS'):
    """Map OSM attributes to BNPB vulnerability classes

    This maps attributes collected in the OpenStreetMap exposure data
    (data.kompetisiosm.org) to 2 vulnerability classes identified by
    BNPB in Kajian Risiko Gempabumi VERS 1.0, 2011. They are
    URM: Unreinforced Masonry and RM: Reinforced Masonry

    Input
        E: Vector object representing the OSM data
        target_attribute: Optional name of the attribute containing
                          the mapped vulnerability class. Default
                          value is 'VCLASS'

    Output:
        Vector object like E, but with one new attribute (e.g. 'VCLASS')
        representing the vulnerability class used in the guidelines
    """

    # Input check
    required = ['levels', 'structure']
    actual = E.get_attribute_names()
    msg = ('Input data to osm2bnpb must have attributes %s. '
           'It has %s' % (str(required), str(actual)))
    for attribute in required:
        verify(attribute in actual, msg)

    # Start mapping
    N = len(E)
    attributes = E.get_data()
    count = 0
    for i in range(N):
        levels = E.get_data('levels', i)
        structure = E.get_data('structure', i)
        if levels is None or structure is None:
            vulnerability_class = 'URM'
            count += 1
        else:
            # Map string variable levels to integer
            if levels.endswith('+'):
                levels = 100

            try:
                levels = int(levels)
            except:
                # E.g. 'ILP jalan'
                vulnerability_class = 'URM'
                count += 1
            else:
                # Start mapping depending on levels
                if levels >= 4:
                    # High
                    vulnerability_class = 'RM'
                elif 1 <= levels < 4:
                    # Low
                    if structure in ['reinforced_masonry', 'confined_masonry']:
                        vulnerability_class = 'RM'
                    elif 'kayu' in structure or 'wood' in structure:
                        vulnerability_class = 'RM'
                    else:
                        vulnerability_class = 'URM'
                elif numpy.allclose(levels, 0):
                    # A few buildings exist with 0 levels.

                    # In general, we should be assigning here the most
                    # frequent building in the area which could be defined
                    # by admin boundaries.
                    vulnerability_class = 'URM'
                else:
                    msg = 'Unknown number of levels: %s' % levels
                    raise Exception(msg)

        # Store new attribute value
        attributes[i][target_attribute] = vulnerability_class

    #print 'Got %i without levels or structure (out of %i total)' % (count, N)

    # Create new vector instance and return
    V = Vector(data=attributes,
               projection=E.get_projection(),
               geometry=E.get_geometry(),
               name=E.get_name() + ' mapped to BNPB vulnerability classes',
               keywords=E.get_keywords())
    return V
Example #26
    def run(layers):
        """Risk plugin for earthquake fatalities

        Input
          layers: List of layers expected to contain
              H: Raster layer of flood depth
              P: Raster layer of population data on the same grid as H
        """

        # Depth above which people are regarded affected [m]
        threshold = 0.1
        thresholds = [0.1, 0.2, 0.3, 0.5, 0.8, 1.0]

        # Identify hazard and exposure layers
        inundation = get_hazard_layer(layers)  # Flood inundation [m]

        # Get population and gender ratio
        population = gender_ratio = None
        for layer in get_exposure_layers(layers):
            keywords = layer.get_keywords()

            if 'datatype' not in keywords:
                population = layer
            else:
                datatype = keywords['datatype']

                if 'ratio' not in datatype:
                    population = layer
                else:
                    # if 'female' in datatype and 'ratio' in datatype:
                    gender_ratio_unit = keywords['unit']

                    msg = ('Unit for gender ratio must be either '
                           '"percent" or "ratio"')
                    if gender_ratio_unit not in ['percent', 'ratio']:
                        raise Exception(msg)

                    gender_ratio = layer

        msg = 'No population layer was found in: %s' % str(layers)
        verify(population is not None, msg)

        # Extract data as numeric arrays
        D = inundation.get_data(nan=0.0)  # Depth

        # Calculate impact as population exposed to depths > threshold
        if population.get_resolution(native=True, isotropic=True) < 0.0005:
            # Keep this for backwards compatibility just a little while
            # This uses the original custom population set and
            # serves as a reference

            P = population.get_data(nan=0.0)  # Population density
            pixel_area = 2500
            I = numpy.where(D > threshold, P, 0) / 100000.0 * pixel_area
        else:
            # This is the new generic way of scaling (issue #168 and #172)
            P = population.get_data(nan=0.0, scaling=True)
            I = numpy.where(D > threshold, P, 0)

        if gender_ratio is not None:
            # Extract gender ratio at each pixel (as ratio)
            G = gender_ratio.get_data(nan=0.0)
            if gender_ratio_unit == 'percent':
                G /= 100

            # Calculate breakdown
            P_female = P * G
            P_male = P - P_female

            I_female = I * G
            I_male = I - I_female

        # Generate text with result for this study
        total = str(int(sum(P.flat) / 1000))
        count = str(int(sum(I.flat) / 1000))

        # Create report
        impact_summary = ('<table border="0" width="320px">'
                   '   <tr><td><b>%s&#58;</b></td>'
                   '<td align="right"><b>%s</b></td></tr>'
                   % ('Jumlah Penduduk', total))
        if gender_ratio is not None:
            total_female = str(int(sum(P_female.flat) / 1000))
            total_male = str(int(sum(P_male.flat) / 1000))

            impact_summary += ('        <tr><td>%s&#58;</td>'
                        '<td align="right">%s</td></tr>'
                        % (' - Wanita', total_female))
            impact_summary += ('        <tr><td>%s&#58;</td>'
                        '<td align="right">%s</td></tr>'
                        % (' - Pria', total_male))
            impact_summary += '<tr><td>&nbsp;</td></tr>'  # Blank row

        impact_summary += ('   <tr><td><b>%s&#58;</b></td>'
                    '<td align="right"><b>%s</b></td></tr>'
                    % ('Perkiraan Jumlah Terdampak (> %.1fm)' % threshold,
                       count))

        if gender_ratio is not None:
            affected_female = str(int(sum(I_female.flat) / 1000))
            affected_male = str(int(sum(I_male.flat) / 1000))

            impact_summary += ('        <tr><td>%s&#58;</td>'
                        '<td align="right">%s</td></tr>'
                        % (' - Wanita', affected_female))
            impact_summary += ('        <tr><td>%s&#58;</td>'
                        '<td align="right">%s</td></tr>'
                        % (' - Pria', affected_male))

        impact_summary += '</table>'

        impact_summary += '<br>'  # Blank separation row
        impact_summary += 'Catatan&#58; Semua nomor x 1000'

        # Create raster object and return
        R = Raster(I,
                   projection=inundation.get_projection(),
                   geotransform=inundation.get_geotransform(),
                   name='People affected',
                   keywords={'impact_summary': impact_summary})
        return R
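# A toy, self-contained illustration (made-up numbers, not part of the plugin
# above) of the core arithmetic: population on pixels deeper than the
# threshold is counted as affected, and an optional female/male breakdown is
# derived from a gender ratio grid.
import numpy

D = numpy.array([[0.0, 0.05], [0.4, 1.2]])  # Flood depth [m]
P = numpy.array([[10., 20.], [30., 40.]])   # Population per pixel
G = numpy.array([[0.5, 0.5], [0.6, 0.4]])   # Female ratio per pixel

threshold = 0.1
I = numpy.where(D > threshold, P, 0)        # Affected population per pixel
I_female = I * G
I_male = I - I_female

# sum(I.flat) == 70.0, sum(I_female.flat) == 34.0, sum(I_male.flat) == 36.0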
Beispiel #27
0
def osm2padang(E):
    """Map OSM attributes to Padang vulnerability classes

    This maps attributes collected in the OpenStreetMap exposure data
    (data.kompetisiosm.org) to 9 vulnerability classes identified by
    Geoscience Australia and ITB in the post 2009 Padang earthquake
    survey (http://trove.nla.gov.au/work/38470066).
    The mapping was developed by Abigail Baca, GFDRR.

    Input
        E: Vector object representing the OSM data

    Output:
        Vector object like E, but with one new attribute ('VCLASS')
        representing the vulnerability class used in the Padang dataset


    Algorithm

    1. Class the "levels" field into height bands where 1-3 = low,
       4-9 = mid, >=10 = high
    2. Where height band = mid then building type = 4
       "RC medium rise Frame with Masonry in-fill walls"
    3. Where height band = high then building type = 6
       "Concrete Shear wall high rise* Hazus C2H"
    4. Where height band = low and structure = (plastered or
       reinforced_masonry) then building type = 7
       "RC low rise Frame with Masonry in-fill walls"
    5. Where height band = low and structure = confined_masonry then
       building type = 8 "Confined Masonry"
    6. Where height band = low and structure = unreinforced_masonry then
       building type = 2 "URM with Metal Roof"
    """

    # Input check
    required = ['levels', 'structure']
    actual = E.get_attribute_names()
    msg = ('Input data to osm2padang must have attributes %s. '
           'It has %s' % (str(required), str(actual)))
    for attribute in required:
        verify(attribute in actual, msg)

    # Start mapping
    N = len(E)
    attributes = E.get_data()
    count = 0
    for i in range(N):
        levels = E.get_data('levels', i)
        structure = E.get_data('structure', i)
        if levels is None or structure is None:
            vulnerability_class = 2
            count += 1
        else:
            # Map string variable levels to integer
            if levels.endswith('+'):
                levels = 100

            try:
                levels = int(levels)
            except:
                # E.g. 'ILP jalan'
                vulnerability_class = 2
                count += 1
            else:
                # Start mapping depending on levels
                if levels >= 10:
                    # High
                    vulnerability_class = 6  # Concrete shear
                elif 4 <= levels < 10:
                    # Mid
                    vulnerability_class = 4  # RC mid
                elif 1 <= levels < 4:
                    # Low
                    if structure in ['plastered',
                                     'reinforced masonry',
                                     'reinforced_masonry']:
                        vulnerability_class = 7  # RC low
                    elif structure == 'confined_masonry':
                        vulnerability_class = 8  # Confined
                    elif 'kayu' in structure or 'wood' in structure:
                        vulnerability_class = 9  # Wood
                    else:
                        vulnerability_class = 2  # URM
                elif numpy.allclose(levels, 0):
                    # A few buildings exist with 0 levels.

                    # In general, we should be assigning here the most
                    # frequent building in the area which could be defined
                    # by admin boundaries.
                    vulnerability_class = 2
                else:
                    msg = 'Unknown number of levels: %s' % levels
                    raise Exception(msg)

        # Store new attribute value
        attributes[i]['VCLASS'] = vulnerability_class

        # Selfcheck for use with osm_080811.shp
        if E.get_name() == 'osm_080811':
            if levels > 0:
                msg = ('Got %s expected %s. levels = %f, structure = %s'
                       % (vulnerability_class,
                          attributes[i]['TestBLDGCl'],
                          levels,
                          structure))
                verify(numpy.allclose(attributes[i]['TestBLDGCl'],
                                      vulnerability_class), msg)

    #print 'Got %i without levels or structure (out of %i total)' % (count, N)

    # Create new vector instance and return
    V = Vector(data=attributes,
               projection=E.get_projection(),
               geometry=E.get_geometry(),
               name=E.get_name() + ' mapped to Padang vulnerability classes',
               keywords=E.get_keywords())
    return V
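# A minimal sketch of the height band rules implemented in osm2padang above
# (the helper name 'padang_class' is hypothetical and not part of the original
# module; 'levels' is assumed to already be an integer here).
def padang_class(levels, structure):
    """Map a level count and OSM structure string to a Padang class number"""
    if levels >= 10:
        return 6  # Concrete shear wall high rise
    if 4 <= levels < 10:
        return 4  # RC medium rise frame with masonry in-fill walls
    if 1 <= levels < 4:
        if structure in ['plastered', 'reinforced masonry',
                         'reinforced_masonry']:
            return 7  # RC low rise frame with masonry in-fill walls
        if structure == 'confined_masonry':
            return 8  # Confined masonry
        if 'kayu' in structure or 'wood' in structure:
            return 9  # Wood
        return 2  # URM with metal roof
    return 2      # Zero levels: fall back to URM as in the example above

# Usage: padang_class(12, 'confined_masonry') returns 6;
#        padang_class(2, 'kayu') returns 9.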
Beispiel #28
0
    def write_to_file(self, filename):
        """Save vector data to file

        Input
            filename: filename with extension .shp or .gml

        Note, if attribute names are longer than 10 characters they will be
        truncated. This is due to limitations in the shp file driver and has
        to be done here since gdal v1.7 onwards has changed its handling of
        this issue: http://www.gdal.org/ogr/drv_shapefile.html
        """

        # Check file format
        basename, extension = os.path.splitext(filename)

        msg = ('Invalid file type for file %s. Only extensions '
               'shp or gml allowed.' % filename)
        verify(extension == '.shp' or extension == '.gml', msg)
        driver = DRIVER_MAP[extension]

        # FIXME (Ole): Temporary flagging of GML issue (ticket #18)
        if extension == '.gml':
            msg = ('OGR GML driver does not store geospatial reference. '
                   'This format is disabled for the time being. See '
                   'https://github.com/AIFDR/riab/issues/18')
            raise Exception(msg)

        # Derive layername from filename (excluding preceding dirs)
        layername = os.path.split(basename)[-1]

        # Get vector data
        geometry = self.get_geometry()
        data = self.get_data()

        N = len(geometry)

        # Clear any previous file of this name (ogr does not overwrite)
        try:
            os.remove(filename)
        except:
            pass

        # Create new file with one layer
        drv = ogr.GetDriverByName(driver)
        if drv is None:
            msg = 'OGR driver %s not available' % driver
            raise Exception(msg)

        ds = drv.CreateDataSource(filename)
        if ds is None:
            msg = 'Creation of output file %s failed' % filename
            raise Exception(msg)

        lyr = ds.CreateLayer(layername,
                             self.projection.spatial_reference,
                             self.geometry_type)
        if lyr is None:
            msg = 'Could not create layer %s' % layername
            raise Exception(msg)

        # Define attributes if any
        store_attributes = False
        if data is not None:
            if len(data) > 0:
                try:
                    fields = data[0].keys()
                except:
                    msg = ('Input parameter "data" was specified '
                           'but it does not contain dictionaries with '
                           'field information as expected. The first '
                           'element is %s' % data[0])
                    raise Exception(msg)
                else:
                    # Establish OGR types for each element
                    ogrtypes = {}
                    for name in fields:
                        att = data[0][name]
                        py_type = type(att)
                        msg = ('Unknown type for storing vector '
                               'data: %s, %s' % (name, str(py_type)[1:-1]))
                        verify(py_type in TYPE_MAP, msg)
                        ogrtypes[name] = TYPE_MAP[py_type]

            else:
                msg = ('Input parameter "data" was specified '
                       'but appears to be empty')
                raise Exception(msg)

            # Create attribute fields in layer
            store_attributes = True
            for name in fields:
                fd = ogr.FieldDefn(name, ogrtypes[name])
                # FIXME (Ole): Trying to address issue #16
                #              But it doesn't work and
                #              somehow changes the values of MMI in test
                #width = max(128, len(name))
                #print name, width
                #fd.SetWidth(width)

                # Silent handling of warnings like
                # Warning 6: Normalized/laundered field name:
                #'CONTENTS_LOSS_AUD' to 'CONTENTS_L'
                gdal.PushErrorHandler('CPLQuietErrorHandler')
                if lyr.CreateField(fd) != 0:
                    msg = 'Could not create field %s' % name
                    raise Exception(msg)

                # Restore error handler
                gdal.PopErrorHandler()

        # Store geometry
        geom = ogr.Geometry(self.geometry_type)
        layer_def = lyr.GetLayerDefn()
        for i in range(N):
            # Create new feature instance
            feature = ogr.Feature(layer_def)

            # Store geometry and check
            if self.is_point_data:
                x = float(geometry[i][0])
                y = float(geometry[i][1])
                geom.SetPoint_2D(0, x, y)
            elif self.is_polygon_data:
                wkt = array2wkt(geometry[i], geom_type='POLYGON')
                geom = ogr.CreateGeometryFromWkt(wkt)
            elif self.is_line_data:
                wkt = array2wkt(geometry[i], geom_type='LINESTRING')
                geom = ogr.CreateGeometryFromWkt(wkt)
            else:
                msg = 'Geometry type %s not implemented' % self.geometry_type
                raise Exception(msg)

            feature.SetGeometry(geom)

            G = feature.GetGeometryRef()
            if G is None:
                msg = 'Could not create GeometryRef for file %s' % filename
                raise Exception(msg)

            # Store attributes
            if store_attributes:
                for j, name in enumerate(fields):
                    actual_field_name = layer_def.GetFieldDefn(j).GetNameRef()

                    val = data[i][name]

                    if type(val) == numpy.ndarray:
                        # A singleton of type <type 'numpy.ndarray'> works
                        # for gdal version 1.6 but fails for version 1.8
                        # in SetField with error: NotImplementedError:
                        # Wrong number of arguments for overloaded function
                        val = float(val)
                    elif val is None:
                        val = ''

                    feature.SetField(actual_field_name, val)

            # Save this feature
            if lyr.CreateFeature(feature) != 0:
                msg = 'Failed to create feature %i in file %s' % (i, filename)
                raise Exception(msg)

            feature.Destroy()

        # Write keywords if any
        write_keywords(self.keywords, basename + '.keywords')