def geotransform2resolution(geotransform, isotropic=False, rtol=1.0e-6,
                            atol=1.0e-8):
    """Extract the grid resolution from a GDAL geotransform.

    Args:
        * geotransform: GDAL geotransform (6-tuple).
          (top left x, w-e pixel resolution, rotation,
          top left y, rotation, n-s pixel resolution).
          See e.g. http://www.gdal.org/gdal_tutorial.html
        * isotropic: If True, verify that dx == dy and return dx.
          If False (default) return 2-tuple (dx, dy)
        * rtol, atol: Tolerances passed on to numpy.allclose when
          checking that the two resolutions qualify as isotropic.

    Returns:
        * resolution: grid spacing (resx, resy) in (positive) decimal
          degrees ordered as longitude first, then latitude,
          or resx alone if isotropic is True.
    """
    res_x = geotransform[1]   # w-e pixel resolution
    res_y = -geotransform[5]  # n-s pixel resolution (stored negative)

    # Anisotropic request: simply hand both spacings back
    if not isotropic:
        return res_x, res_y

    msg = ("Resolution requested with "
           "isotropic=True, but "
           "resolutions in the horizontal and vertical "
           "are different: resx = %.12f, resy = %.12f. " % (res_x, res_y))
    verify(numpy.allclose(res_x, res_y, rtol=rtol, atol=atol), msg)
    return res_x
def convert_polygons_to_centroids(V):
    """Convert polygon vector data to point vector data

    :param V: Vector layer with polygon data
    :type V: Vector

    :returns: Vector layer with point data and the same attributes as V
    :rtype: Vector
    """
    msg = 'Input data %s must be polygon vector data' % V
    verify(V.is_polygon_data, msg)

    geometry = V.get_geometry()

    # One centroid per feature, preserving feature order
    centroids = [calculate_polygon_centroid(geometry[i])
                 for i in range(len(V))]

    # Wrap the centroids in a new point layer, carrying over the
    # original attributes, projection and keywords
    return Vector(data=V.get_data(),
                  projection=V.get_projection(),
                  geometry=centroids,
                  name='%s_centroid_data' % V.get_name(),
                  keywords=V.get_keywords())
def rings_equal(x, y, rtol=1.0e-6, atol=1.0e-8):
    """Compares two linear rings as numpy arrays

    Args
        * x, y: Nx2 numpy arrays

    Returns:
        * True if x == y or x' == y (up to the specified tolerance)
          where x' is x reversed in the first dimension. This corresponds
          to linear rings being seen as equal irrespective of whether
          they are organised in clock wise or counter clock wise order
    """
    # FIX: numpy.float was a deprecated alias of the builtin float and
    # was removed in NumPy 1.24; use the builtin directly (identical
    # semantics on all NumPy versions)
    x = ensure_numeric(x, float)
    y = ensure_numeric(y, float)

    # FIX: corrected grammar of the error message ('must a 2d arrays')
    msg = 'Arrays must be 2d arrays of vertices. I got %s and %s' % (x, y)
    verify(len(x.shape) == 2 and len(y.shape) == 2, msg)

    msg = 'Arrays must have two columns. I got %s and %s' % (x, y)
    verify(x.shape[1] == 2 and y.shape[1] == 2, msg)

    # Rings are equal if they match vertex-for-vertex in either
    # orientation (clockwise or counter clockwise)
    if (numpy.allclose(x, y, rtol=rtol, atol=atol) or
            numpy.allclose(x, y[::-1], rtol=rtol, atol=atol)):
        return True
    else:
        return False
def bboxstring2list(bbox_string): """Convert bounding box string to list Args: * bbox_string: String of bounding box coordinates of the form 'W,S,E,N' Returns: * bbox: List of floating point numbers with format [W, S, E, N] """ msg = ('Bounding box must be a string with coordinates following the ' 'format 105.592,-7.809,110.159,-5.647\n' 'Instead I got %s of type %s.' % (str(bbox_string), type(bbox_string))) verify(isinstance(bbox_string, basestring), msg) fields = bbox_string.split(',') msg = ('Bounding box string must have 4 coordinates in the form ' '"W,S,E,N". I got bbox == "%s"' % bbox_string) try: verify(len(fields) == 4, msg) except VerificationError: raise BoundingBoxError(msg) for x in fields: try: float(x) except ValueError, e: msg = ('Bounding box %s contained non-numeric entry %s, ' 'original error was "%s".' % (bbox_string, x, e)) raise BoundingBoxError(msg)
def bboxlist2string(bbox, decimals=6): """Convert bounding box list to comma separated string Args: * bbox: List of coordinates of the form [W, S, E, N] Returns: * bbox_string: Format 'W,S,E,N' - each will have 6 decimal points """ msg = 'Got string %s, but expected bounding box as a list' % str(bbox) verify(not isinstance(bbox, basestring), msg) try: bbox = list(bbox) except: msg = 'Could not coerce bbox %s into a list' % str(bbox) raise BoundingBoxError(msg) msg = ('Bounding box must have 4 coordinates [W, S, E, N]. ' 'I got %s' % str(bbox)) try: verify(len(bbox) == 4, msg) except VerificationError: raise BoundingBoxError(msg) for x in bbox: try: float(x) except ValueError, e: msg = ('Bounding box %s contained non-numeric entry %s, ' 'original error was "%s".' % (bbox, x, e)) raise BoundingBoxError(msg)
def grid_to_points(A, x, y):
    """Flatten a regular grid into point coordinates and values.

    :param A: Array of pixel values
    :type A: numpy.ndarray

    :param x: Longitudes corresponding to columns in A (west->east)
    :type x: numpy.ndarray

    :param y: Latitudes corresponding to rows in A (south->north)
    :type y: numpy.ndarray

    Returns:
        * P: Nx2 array of point coordinates
        * V: N array of point values
    """
    msg = ('Longitudes must be increasing (west to east). I got %s' % str(x))
    verify(x[0] < x[1], msg)

    msg = ('Latitudes must be increasing (south to north). I got %s' % str(y))
    verify(y[0] < y[1], msg)

    # One (lon, lat) pair per grid cell, ordered so that the points
    # line up with a row-major flattening of A
    coordinates = axes_to_points(x, y)

    # Row-major 1D view of the pixel values (length M*N)
    pixel_values = A.reshape(-1)

    return coordinates, pixel_values
def convert_polygons_to_centroids(V):
    """Convert polygon vector data to point vector data

    Args:
        * V: Vector layer with polygon data

    Returns:
        * Vector layer with point data and the same attributes as V
    """
    msg = 'Input data %s must be polygon vector data' % V
    verify(V.is_polygon_data, msg)

    polygons = V.get_geometry()

    # Reduce each polygon to its centroid, preserving feature order
    centroid_geometry = []
    for index in range(len(V)):
        centroid_geometry.append(calculate_polygon_centroid(polygons[index]))

    # Return a point layer carrying the original attributes, projection
    # and keywords
    return Vector(data=V.get_data(),
                  projection=V.get_projection(),
                  geometry=centroid_geometry,
                  name='%s_centroid_data' % V.get_name(),
                  keywords=V.get_keywords())
def _keywords_to_string(keywords, sublayer=None):
    """Render a keywords dict as a multi-line 'key: value' string.

    Args:
        * keywords: A required dictionary containing the keywords to
          stringify.
        * sublayer: str optional group marker for a sub layer.

    Returns:
        str: a String containing the rendered keywords list

    Raises:
        Any exceptions are propogated.

    .. note: Only simple keyword dicts should be passed here, not
       multilayer dicts. Each entry is rendered as one 'key: value'
       line; if sublayer is given the output is prefixed with a
       '[sublayer]' group header.
    """
    result = get_unicode("")
    if sublayer is not None:
        result = "[%s]\n" % sublayer

    for key, value in keywords.items():
        # Keys must be plain strings without the key/value separator
        msg = ("Key in keywords dictionary must be a string. "
               "I got %s with type %s" % (key, str(type(key))[1:-1]))
        verify(isinstance(key, basestring), msg)

        msg = ('Key in keywords dictionary must not contain the ":" '
               'character. I got "%s"' % key)
        verify(":" not in key, msg)

        result += "%s: %s\n" % (key, value)

    return result
def interpolate_raster_vector(source, target, layer_name=None, attribute_name=None, mode='linear'): """Interpolate from raster layer to vector data Args: * source: Raster data set (grid) * target: Vector data set (points or polygons) * layer_name: Optional name of returned interpolated layer. If None the name of V is used for the returned layer. * attribute_name: Name for new attribute. If None (default) the name of R is used Returns: I: Vector data set; points located as target with values interpolated from source Note: If target geometry is polygon, data will be interpolated to its centroids and the output is a point data set. """ # Input checks verify(source.is_raster) verify(target.is_vector) if target.is_point_data: # Interpolate from raster to point data R = interpolate_raster_vector_points(source, target, layer_name=layer_name, attribute_name=attribute_name, mode=mode) # elif target.is_line_data: # TBA - issue https://github.com/AIFDR/inasafe/issues/36 # elif target.is_polygon_data: # Use centroids, in case of polygons P = convert_polygons_to_centroids(target) R = interpolate_raster_vector_points(source, P, layer_name=layer_name, attribute_name=attribute_name, mode=mode) # In case of polygon data, restore the polygon geometry # Do this setting the geometry of the returned set to # that of the original polygon R = Vector(data=R.get_data(), projection=R.get_projection(), geometry=target.get_geometry(), name=R.get_name()) else: msg = ('Unknown datatype for raster2vector interpolation: ' 'I got %s' % str(target)) raise InaSAFEError(msg) # Return interpolated vector layer return R
def bbox_intersection(*args):
    """Compute intersection between two or more bounding boxes

    Args:
        * args: two or more bounding boxes.
          Each is assumed to be a list or a tuple with
          four coordinates (W, S, E, N)

    Returns:
        * result: The minimal common bounding box, or None if the
          boxes do not overlap

    Raises:
        BoundingBoxError: if any supplied box is malformed
    """
    msg = 'Function bbox_intersection must take at least 2 arguments.'
    verify(len(args) > 1, msg)

    # Start from the whole globe and shrink towards the intersection
    result = [-180, -90, 180, 90]
    for a in args:
        if a is None:
            # None boxes are simply ignored
            continue

        msg = ('Bounding box expected to be a list of the '
               'form [W, S, E, N]. '
               'Instead i got "%s"' % str(a))
        try:
            box = list(a)
        except TypeError:
            # FIX: narrowed from a bare except, which would also
            # swallow KeyboardInterrupt and SystemExit
            raise Exception(msg)

        if not len(box) == 4:
            raise BoundingBoxError(msg)

        msg = ('Western boundary must be less than or equal to eastern. '
               'I got %s' % box)
        if not box[0] <= box[2]:
            raise BoundingBoxError(msg)

        msg = ('Southern boundary must be less than or equal to northern. '
               'I got %s' % box)
        if not box[1] <= box[3]:
            raise BoundingBoxError(msg)

        # Compute intersection

        # West and South: take the largest lower bound
        for i in [0, 1]:
            result[i] = max(result[i], box[i])

        # East and North: take the smallest upper bound
        for i in [2, 3]:
            result[i] = min(result[i], box[i])

    # Check validity and return
    if result[0] <= result[2] and result[1] <= result[3]:
        return result
    else:
        return None
def interpolate_raster_vector(source, target, layer_name=None,
                              attribute_name=None, mode='linear'):
    """Interpolate from raster layer to vector data

    Args:
        * source: Raster data set (grid)
        * target: Vector data set (points or polygons)
        * layer_name: Optional name of returned interpolated layer.
              If None the name of V is used for the returned layer.
        * attribute_name: Name for new attribute.
              If None (default) the name of R is used
        * mode: Interpolation mode passed on to
              interpolate_raster_vector_points

    Returns:
        I: Vector data set; points located as target
           with values interpolated from source

    Note: If target geometry is polygon, data will be interpolated to
          its centroids and the output is a point data set.
    """
    # Input checks
    verify(source.is_raster)
    verify(target.is_vector)

    if target.is_point_data:
        # Interpolate from raster to point data
        R = interpolate_raster_vector_points(
            source, target, layer_name=layer_name,
            attribute_name=attribute_name, mode=mode)
    # elif target.is_line_data:
        # TBA - issue https://github.com/AIFDR/inasafe/issues/36
    elif target.is_polygon_data:
        # Use centroids, in case of polygons
        P = convert_polygons_to_centroids(target)
        # FIX: pass mode through; it was silently dropped for polygon
        # targets, so non-default interpolation modes were ignored
        R = interpolate_raster_vector_points(
            source, P, layer_name=layer_name,
            attribute_name=attribute_name, mode=mode)

        # In case of polygon data, restore the polygon geometry
        # Do this setting the geometry of the returned set to
        # that of the original polygon
        R = Vector(
            data=R.get_data(),
            projection=R.get_projection(),
            geometry=target.get_geometry(),
            name=R.get_name())
    else:
        msg = ('Unknown datatype for raster2vector interpolation: '
               'I got %s' % str(target))
        raise InaSAFEError(msg)

    # Return interpolated vector layer
    return R
def geotransform_to_axes(G, nx, ny):
    """Convert geotransform to coordinate axes

    :param G: GDAL geotransform (6-tuple).
        (top left x, w-e pixel resolution, rotation,
        top left y, rotation, n-s pixel resolution).
    :type G: tuple

    :param nx: Number of cells in the w-e direction
    :type nx: int

    :param ny: Number of cells in the n-s direction
    :type ny: int

    :returns: Two vectors (longitudes and latitudes) representing the grid
        defined by the geotransform.
        The values are offset by half a pixel size to correspond to
        pixel registration. I.e. If the grid origin (top left corner) is
        (100, 10), nx = ny = 10 and the resolution is 1 degree in each
        direction, then the vectors will take the form

        longitudes = [100.5, 101.5, ..., 109.5]
        latitudes = [0.5, 1.5, ..., 9.5]
    """
    lon_ul = float(G[0])  # Longitude of upper left corner
    lat_ul = float(G[3])  # Latitude of upper left corner
    dx = float(G[1])  # Longitudinal resolution
    dy = -float(G[5])  # Latitudinal resolution (always(?) negative)

    # Resolutions must be positive after sign normalisation
    verify(dx > 0)
    verify(dy > 0)

    # Coordinates of lower left corner
    lon_ll = lon_ul
    lat_ll = lat_ul - ny * dy

    # Coordinates of upper right corner
    lon_ur = lon_ul + nx * dx

    # Define pixel centers along each directions
    # This is to achieve pixel registration rather
    # than gridline registration
    dx2 = dx / 2
    dy2 = dy / 2

    # Define longitudes and latitudes for each axes
    x = numpy.linspace(lon_ll + dx2, lon_ur - dx2, nx)
    y = numpy.linspace(lat_ll + dy2, lat_ul - dy2, ny)

    # Return
    return x, y
def dom2object(node):
    """Convert DOM representation to XML_object hierarchy.

    Args:
        * node: xml.dom node (document root or element node)

    Returns:
        * XML_element wrapping the node's tag and its recursively
          converted children (or the text value for leaf nodes)

    Raises:
        Exception: if a text node is followed by a non-text tag
    """
    value = []
    textnode_encountered = None
    for n in node.childNodes:
        if n.nodeType == 3:
            # Child is a text element - omit the dom tag #text and
            # go straight to the text value.
            # Note - only the last text value will be recorded

            msg = 'Text element has child nodes - this shouldn\'t happen'
            verify(len(n.childNodes) == 0, msg)

            x = n.nodeValue.strip()
            if len(x) == 0:
                # Skip empty text children
                continue

            textnode_encountered = value = x
        else:
            # XML element

            if textnode_encountered is not None:
                msg = ('A text node was followed by a non-text tag. '
                       'This is not allowed.\n')
                msg += ('Offending text node: "%s" '
                        % str(textnode_encountered))
                msg += ('was followed by node named: "<%s>"'
                        % str(n.nodeName))
                # FIX: use the call form of raise, which is valid in
                # both Python 2 and 3; the old 'raise Exception, msg'
                # statement form is a syntax error in Python 3
                raise Exception(msg)

            value.append(dom2object(n))

    # Deal with empty elements
    if len(value) == 0:
        value = ''

    if node.nodeType == 9:
        # Root node (document)
        tag = None
    else:
        # Normal XML node
        tag = node.nodeName

    X = XML_element(tag=tag, value=value)
    return X
def tag_polygons_by_grid(polygons, grid, threshold=0, tag='affected'):
    """Tag polygons by raster values

    Args:
        * polygons: Polygon layer
        * grid: Raster layer
        * threshold: Threshold for grid value to tag polygon
        * tag: Name of new tag

    Returns:
        Polygon layer: Same as input polygon but with extra attribute
        tag set according to grid values
    """
    verify(polygons.is_polygon_data)
    verify(grid.is_raster)

    polygon_attributes = polygons.get_data()
    polygon_geometry = polygons.get_geometry(as_geometry_objects=True)

    # Separate grid points by polygon
    # NOTE(review): assumes clip_grid_by_polygons returns a 2-tuple
    # whose first element is the per-polygon (points, values) list -
    # TODO confirm against that function's signature
    res, _ = clip_grid_by_polygons(
        grid.get_data(),
        grid.get_geotransform(),
        polygon_geometry)

    # Create new polygon layer with tag set according to grid values
    # and threshold
    new_attributes = []
    for i, (_, values) in enumerate(res):
        # For each polygon check if any grid value in it exceeds the
        # threshold
        affected = False
        for val in values:
            # Check each grid value in this polygon
            if val > threshold:
                affected = True

        # Existing attributes for this polygon
        attr = polygon_attributes[i].copy()

        # Create tagged polygon feature
        if affected:
            attr[tag] = True
        else:
            attr[tag] = False

        new_attributes.append(attr)

    R = Vector(data=new_attributes,
               projection=polygons.get_projection(),
               geometry=polygon_geometry,
               name='%s_tagged_by_%s' % (polygons.name, grid.name))
    return R
def copy_keywords(
        self, source_layer, destination_file, extra_keywords=None):
    """Helper to copy the keywords file from a source to a target dataset.

    e.g.::

        copyKeywords('foo.shp', 'bar.shp')

    Will result in the foo.keywords file being copied to bar.keyword.

    Optional argument extraKeywords is a dictionary with additional
    keywords that will be added to the destination file e.g::

        copyKeywords('foo.shp', 'bar.shp', {'resolution': 0.01})

    :param source_layer: A QGIS QgsMapLayer instance.
    :type source_layer: QgsMapLayer

    :param destination_file: The output filename that should be used
        to store the keywords in. It can be a .shp or a .keywords for
        example since the suffix will always be replaced with .keywords.
    :type destination_file: str

    :param extra_keywords: A dict containing all the extra keywords
        to be written for the layer. The written keywords will consist of
        any original keywords from the source layer's keywords file and
        and the extra keywords (which will replace the source layers
        keywords if the key is identical).
    :type extra_keywords: dict

    :raises: Exception if the keywords cannot be read or written.
    """
    keywords = self.read_keywords(source_layer)
    if extra_keywords is None:
        extra_keywords = {}
    message = self.tr(
        'Expected extraKeywords to be a dictionary. Got '
        '%s' % str(type(extra_keywords))[1:-1])
    verify(isinstance(extra_keywords, dict), message)
    # compute the output keywords file name
    destination_base = os.path.splitext(destination_file)[0]
    new_destination = destination_base + '.keywords'
    # write the extra keywords into the source dict
    # (extra keywords win over the originals on key collisions)
    try:
        for key in extra_keywords:
            keywords[key] = extra_keywords[key]
        write_keywords_to_file(new_destination, keywords)
    except Exception, e:
        # Re-wrap any failure with a translated, user-facing message
        message = self.tr(
            'Failed to copy keywords file from : \n%s\nto\n%s: %s' % (
                source_layer.source(), new_destination, str(e)))
        raise Exception(message)
def tag_polygons_by_grid(polygons, grid, threshold=0, tag='affected'):
    """Tag polygons by raster values

    Args:
        * polygons: Polygon layer
        * grid: Raster layer
        * threshold: Threshold for grid value to tag polygon
        * tag: Name of new tag

    Returns:
        Polygon layer: Same as input polygon but with extra attribute
        tag set according to grid values
    """
    verify(polygons.is_polygon_data)
    verify(grid.is_raster)

    polygon_attributes = polygons.get_data()
    polygon_geometry = polygons.get_geometry(as_geometry_objects=True)

    # Separate grid points by polygon
    # NOTE(review): assumes clip_grid_by_polygons returns the iterable
    # of per-polygon (points, values) pairs directly - TODO confirm
    # against that function's signature
    res = clip_grid_by_polygons(grid.get_data(),
                                grid.get_geotransform(),
                                polygon_geometry)

    # Create new polygon layer with tag set according to grid values
    # and threshold
    new_attributes = []
    for i, (_, values) in enumerate(res):
        # FIX: use any() so the scan short-circuits as soon as one grid
        # value exceeds the threshold (the old loop kept scanning)
        affected = any(val > threshold for val in values)

        # Existing attributes for this polygon, plus the new tag
        attr = polygon_attributes[i].copy()
        attr[tag] = affected
        new_attributes.append(attr)

    R = Vector(data=new_attributes,
               projection=polygons.get_projection(),
               geometry=polygon_geometry,
               name='%s_tagged_by_%s' % (polygons.name, grid.name))
    return R
def sigab2bnpb(E, target_attribute='VCLASS'):
    """Map SIGAB point data to BNPB vulnerability classes

    Input
        E: Vector object representing the OSM data
        target_attribute: Optional name of the attribute containing
                          the mapped vulnerability class. Default
                          value is 'VCLASS'

    Output:
        Vector object like E, but with one new attribute (e.g. 'VCLASS')
        representing the vulnerability class used in the guidelines
    """
    # Input check
    required = ['Struktur_B', 'Lantai', 'Atap', 'Dinding', 'Tingkat']
    actual = E.get_attribute_names()
    msg = ('Input data to sigab2bnpb must have attributes %s. '
           'It has %s' % (str(required), str(actual)))
    for attribute in required:
        verify(attribute in actual, msg)

    # Start mapping
    N = len(E)
    attributes = E.get_data()
    for i in range(N):
        levels = E.get_data('Tingkat', i).lower()
        structure = E.get_data('Struktur_B', i).lower()
        # FIX: removed unused locals roof_type, wall_type and
        # floor_type ('Atap', 'Dinding', 'Lantai') - they were fetched
        # but never referenced

        if levels == 'none' or structure == 'none':
            # Missing data defaults to unreinforced masonry
            vulnerability_class = 'URM'
        elif structure.startswith('beton') or structure.startswith('kayu'):
            # Concrete or timber structures
            vulnerability_class = 'RM'
        else:
            if int(levels) >= 2:
                # Multi storey buildings
                vulnerability_class = 'RM'
            else:
                vulnerability_class = 'URM'

        # Store new attribute value
        attributes[i][target_attribute] = vulnerability_class

    # Create new vector instance and return
    V = Vector(data=attributes,
               projection=E.get_projection(),
               geometry=E.get_geometry(),
               name=E.get_name() + ' mapped to BNPB vulnerability classes',
               keywords=E.get_keywords())
    return V
def axes_to_points(x, y):
    """Generate all combinations of grid point coordinates from x and y axes

    :param x: x coordinates (array)
    :type x: numpy.ndarray

    :param y: y coordinates (array)
    :type y: numpy.ndarray

    :returns:
        * P: Nx2 array consisting of coordinates for all
             grid points defined by x and y axes. The x coordinate
             will vary the fastest to match the way 2D numpy
             arrays are laid out by default ('C' order). That way,
             the x and y coordinates will match a corresponding
             2D array A when flattened (A.flat[:] or A.reshape(-1))

    Note:
        Example

        x = [1, 2, 3]
        y = [10, 20]

        P = [[1, 10],
             [2, 10],
             [3, 10],
             [1, 20],
             [2, 20],
             [3, 20]]
    """
    # Reverse y coordinates so points start at the bottom (south) row
    y_flipped = numpy.flipud(y)

    # x varies fastest: the whole x axis is repeated once per y value
    # noinspection PyTypeChecker
    xs = numpy.kron(numpy.ones(len(y_flipped)), x)

    # y varies slowest: each y value is repeated once per x value
    ys = numpy.kron(y_flipped, numpy.ones(len(x)))

    # Sanity check before pairing
    verify(len(ys) == len(xs))

    # Pair the coordinates into an Nx2 array and return
    return numpy.column_stack((xs, ys))
def sigab2bnpb(E, target_attribute='VCLASS'):
    """Map SIGAB point data to BNPB vulnerability classes

    Input
        E: Vector object representing the OSM data
        target_attribute: Optional name of the attribute containing
                          the mapped vulnerability class. Default
                          value is 'VCLASS'

    Output:
        Vector object like E, but with one new attribute (e.g. 'VCLASS')
        representing the vulnerability class used in the guidelines
    """
    # Ensure all attributes needed for the mapping are present
    required = ['Struktur_B', 'Lantai', 'Atap', 'Dinding', 'Tingkat']
    actual = E.get_attribute_names()
    msg = ('Input data to sigab2bnpb must have attributes %s. '
           'It has %s' % (str(required), str(actual)))
    for attribute in required:
        verify(attribute in actual, msg)

    # Classify every feature and record the result in target_attribute
    attributes = E.get_data()
    for i in range(len(E)):
        levels = E.get_data('Tingkat', i).lower()
        structure = E.get_data('Struktur_B', i).lower()

        if levels == 'none' or structure == 'none':
            vulnerability_class = 'URM'
        elif structure.startswith('beton') or structure.startswith('kayu'):
            vulnerability_class = 'RM'
        elif int(levels) >= 2:
            vulnerability_class = 'RM'
        else:
            vulnerability_class = 'URM'

        attributes[i][target_attribute] = vulnerability_class

    # Build the output layer with the augmented attribute table
    return Vector(data=attributes,
                  projection=E.get_projection(),
                  geometry=E.get_geometry(),
                  name=E.get_name() + ' mapped to BNPB vulnerability classes',
                  keywords=E.get_keywords())
def calculate_polygon_area(polygon, signed=False):
    """Calculate the signed area of non-self-intersecting polygon

    :param polygon: Numeric array of points (longitude, latitude).
        It is assumed to be closed, i.e. first and last points are
        identical
    :type polygon: numpy.ndarray

    :param signed: Optional flag deciding whether returned area retains
        its sign: If points are ordered counter clockwise, the signed
        area will be positive. If points are ordered clockwise, it will
        be negative. Default is False which means that the area is
        always positive.
    :type signed: bool

    :returns: area: Area of polygon (subject to the value of argument
        signed)
    :rtype: numpy.ndarray

    Note:
        Sources
        http://paulbourke.net/geometry/polyarea/
        http://en.wikipedia.org/wiki/Centroid
    """
    # Coerce to a numeric array of vertices
    vertices = numpy.array(polygon)

    msg = ('Polygon is assumed to consist of coordinate pairs. '
           'I got second dimension %i instead of 2' % vertices.shape[1])
    verify(vertices.shape[1] == 2, msg)

    lon = vertices[:, 0]
    lat = vertices[:, 1]

    # Shoelace formula: 0.5 sum_{i=0}^{N-1} (x_i y_{i+1} - x_{i+1} y_i)
    signed_area = numpy.sum(lon[:-1] * lat[1:] - lat[:-1] * lon[1:]) / 2.

    return signed_area if signed else abs(signed_area)
def write_to_file(self, filename): """Save raster data to file Args: * filename: filename with extension .tif Gdal documentation at: http://www.gdal.org/classGDALRasterBand.html """ # Check file format basename, extension = os.path.splitext(filename) msg = ('Invalid file type for file %s. Only extension ' 'tif allowed.' % filename) verify(extension in ['.tif'], msg) file_format = DRIVER_MAP[extension] # Get raster data A = self.get_data() # Get Dimensions. Note numpy and Gdal swap order N, M = A.shape # Create empty file. # FIXME (Ole): It appears that this is created as single # precision even though Float64 is specified # - see issue #17 driver = gdal.GetDriverByName(file_format) fid = driver.Create(get_string(filename), M, N, 1, gdal.GDT_Float64) if fid is None: msg = ('Gdal could not create filename %s using ' 'format %s' % (filename, file_format)) raise WriteLayerError(msg) self.filename = filename # Write metadata fid.SetProjection(str(self.projection)) fid.SetGeoTransform(self.geotransform) # Write data fid.GetRasterBand(1).WriteArray(A) fid.GetRasterBand(1).SetNoDataValue(self.get_nodata_value()) # noinspection PyUnusedLocal fid = None # Close # Write keywords if any write_iso19115_metadata(filename, self.keywords)
def raster_geometry_to_geotransform(longitudes, latitudes):
    """Build a GDAL geotransform from longitude and latitude axis vectors.

    Note:
        This is the inverse operation of Raster.get_geometry().

    :param longitudes: Vectors of geographic coordinates
    :type longitudes:

    :param latitudes: Vectors of geographic coordinates
    :type latitudes:

    :returns: geotransform: 6-tuple (top left x, w-e pixel resolution,
        rotation, top left y, rotation, n-s pixel resolution)
    """
    num_lon = len(longitudes)
    num_lat = len(latitudes)

    msg = ('You must specify more than 1 longitude to make geotransform: '
           'I got %s' % str(longitudes))
    verify(num_lon > 1, msg)

    msg = ('You must specify more than 1 latitude to make geotransform: '
           'I got %s' % str(latitudes))
    verify(num_lat > 1, msg)

    # Axis spacings; the latitudinal spacing is negative following
    # the GDAL convention for north-up images
    dx = float(longitudes[1] - longitudes[0])
    dy = float(latitudes[0] - latitudes[1])

    # The axes hold pixel centres, so the grid origin (upper left
    # corner) lies half a pixel outside the first centre
    half_dx = dx / 2
    half_dy = dy / 2

    return (
        longitudes[0] - half_dx,  # Longitude of upper left corner
        dx,                       # w-e pixel resolution
        0,                        # rotation
        latitudes[-1] - half_dy,  # Latitude of upper left corner
        0,                        # rotation
        dy)                       # n-s pixel resolution
def raster_geometry_to_geotransform(longitudes, latitudes):
    """Convert vectors of longitudes and latitudes to geotransform

    Note:
        This is the inverse operation of Raster.get_geometry().

    :param longitudes: Vectors of geographic coordinates
    :type longitudes:

    :param latitudes: Vectors of geographic coordinates
    :type latitudes:

    :returns: geotransform: 6-tuple (top left x, w-e pixel resolution,
        rotation, top left y, rotation, n-s pixel resolution)
    """
    nx = len(longitudes)
    ny = len(latitudes)

    msg = ('You must specify more than 1 longitude to make geotransform: '
           'I got %s' % str(longitudes))
    verify(nx > 1, msg)

    msg = ('You must specify more than 1 latitude to make geotransform: '
           'I got %s' % str(latitudes))
    verify(ny > 1, msg)

    dx = float(longitudes[1] - longitudes[0])  # Longitudinal resolution
    dy = float(latitudes[0] - latitudes[1])  # Latitudinal resolution (neg)

    # Define pixel centers along each directions
    # This is to achieve pixel registration rather
    # than gridline registration
    dx2 = dx / 2
    dy2 = dy / 2

    # The axes hold pixel centres, so the upper left corner of the
    # grid lies half a pixel outside the first/last centre
    geotransform = (longitudes[0] - dx2,  # Longitude of upper left corner
                    dx,                   # w-e pixel resolution
                    0,                    # rotation
                    latitudes[-1] - dy2,  # Latitude of upper left corner
                    0,                    # rotation
                    dy)                   # n-s pixel resolution

    return geotransform
def write_to_file(self, filename):
    """Save raster data to file

    Args:
        * filename: filename with extension .tif or .asc

    Raises:
        WriteLayerError: if GDAL cannot create the output file
    """
    # Check file format
    basename, extension = os.path.splitext(filename)

    # FIX: the message now matches the actual check, which accepts
    # both .tif and .asc (it previously claimed only tif was allowed)
    msg = ('Invalid file type for file %s. Only extensions '
           'tif or asc allowed.' % filename)
    verify(extension in ['.tif', '.asc'], msg)
    file_format = DRIVER_MAP[extension]

    # Get raster data
    A = self.get_data()

    # Get Dimensions. Note numpy and Gdal swap order
    N, M = A.shape

    # Create empty file.
    # FIXME (Ole): It appears that this is created as single
    #              precision even though Float64 is specified
    #              - see issue #17
    driver = gdal.GetDriverByName(file_format)
    fid = driver.Create(filename, M, N, 1, gdal.GDT_Float64)
    if fid is None:
        msg = ('Gdal could not create filename %s using '
               'format %s' % (filename, file_format))
        raise WriteLayerError(msg)

    self.filename = filename

    # Write metadata
    fid.SetProjection(str(self.projection))
    fid.SetGeoTransform(self.geotransform)

    # Write data
    fid.GetRasterBand(1).WriteArray(A)

    # Write keywords if any
    write_keywords(self.keywords, basename + '.keywords')
def check_geotransform(geotransform):
    """Check that geotransform is valid

    :param geotransform: GDAL geotransform (6-tuple).
        (top left x, w-e pixel resolution, rotation,
        top left y, rotation, n-s pixel resolution).
        See e.g. http://www.gdal.org/gdal_tutorial.html
    :type geotransform: tuple

    :raises: InaSAFEError if an element cannot be cast to float

    .. note:: This assumes that the spatial reference uses geographic
        coordinates, so will not work for projected coordinate systems.
    """
    msg = ('Supplied geotransform must be a tuple with '
           '6 numbers. I got %s' % str(geotransform))
    verify(len(geotransform) == 6, msg)

    for x in geotransform:
        try:
            float(x)
        except TypeError:
            raise InaSAFEError(msg)

    # Check longitude
    msg = ('Element in 0 (first) geotransform must be a valid '
           'longitude. I got %s' % geotransform[0])
    verify(-180 <= geotransform[0] <= 180, msg)

    # Check latitude
    msg = ('Element 3 (fourth) in geotransform must be a valid '
           'latitude. I got %s' % geotransform[3])
    verify(-90 <= geotransform[3] <= 90, msg)

    # Check cell size
    msg = ('Element 1 (second) in geotransform must be a positive '
           'number. I got %s' % geotransform[1])
    verify(geotransform[1] > 0, msg)

    # FIX: the message previously reported geotransform[1] instead of
    # the offending element geotransform[5]
    msg = ('Element 5 (sixth) in geotransform must be a negative '
           'number. I got %s' % geotransform[5])
    verify(geotransform[5] < 0, msg)
def check_geotransform(geotransform):
    """Check that geotransform is valid

    Args
        * geotransform: GDAL geotransform (6-tuple).
          (top left x, w-e pixel resolution, rotation,
          top left y, rotation, n-s pixel resolution).
          See e.g. http://www.gdal.org/gdal_tutorial.html

    Note
        This assumes that the spatial reference uses geographic
        coordinates, so will not work for projected coordinate systems.
    """
    msg = ('Supplied geotransform must be a tuple with '
           '6 numbers. I got %s' % str(geotransform))
    verify(len(geotransform) == 6, msg)

    for x in geotransform:
        try:
            float(x)
        except TypeError:
            raise InaSAFEError(msg)

    # Check longitude
    msg = ('Element in 0 (first) geotransform must be a valid '
           'longitude. I got %s' % geotransform[0])
    verify(-180 <= geotransform[0] <= 180, msg)

    # Check latitude
    msg = ('Element 3 (fourth) in geotransform must be a valid '
           'latitude. I got %s' % geotransform[3])
    verify(-90 <= geotransform[3] <= 90, msg)

    # Check cell size
    msg = ('Element 1 (second) in geotransform must be a positive '
           'number. I got %s' % geotransform[1])
    verify(geotransform[1] > 0, msg)

    # FIX: the message previously reported geotransform[1] instead of
    # the offending element geotransform[5]
    msg = ('Element 5 (sixth) in geotransform must be a negative '
           'number. I got %s' % geotransform[5])
    verify(geotransform[5] < 0, msg)
def write_to_file(self, filename):
    """Save raster data to file

    Args:
        * filename: filename with extension .tif or .asc

    Raises:
        WriteLayerError: if GDAL cannot create the output file
    """
    # Check file format
    basename, extension = os.path.splitext(filename)

    # FIX: the message now matches the actual check, which accepts
    # both .tif and .asc (it previously claimed only tif was allowed)
    msg = ('Invalid file type for file %s. Only extensions '
           'tif or asc allowed.' % filename)
    verify(extension in ['.tif', '.asc'], msg)
    file_format = DRIVER_MAP[extension]

    # Get raster data
    A = self.get_data()

    # Get Dimensions. Note numpy and Gdal swap order
    N, M = A.shape

    # Create empty file.
    # FIXME (Ole): It appears that this is created as single
    #              precision even though Float64 is specified
    #              - see issue #17
    driver = gdal.GetDriverByName(file_format)
    fid = driver.Create(filename, M, N, 1, gdal.GDT_Float64)
    if fid is None:
        msg = ('Gdal could not create filename %s using '
               'format %s' % (filename, file_format))
        raise WriteLayerError(msg)

    # Write metadata
    fid.SetProjection(str(self.projection))
    fid.SetGeoTransform(self.geotransform)

    # Write data
    fid.GetRasterBand(1).WriteArray(A)

    # Write keywords if any
    write_keywords(self.keywords, basename + '.keywords')
def interpolate_polygon_vector(source, target,
                               layer_name=None, attribute_name=None):
    """Interpolate from polygon vector layer to vector data

    Args:
        * source: Vector data set (polygon)
        * target: Vector data set (points or polygons)  - TBA also lines
        * layer_name: Optional name of returned interpolated layer.
              If None the name of target is used for the returned layer.
        * attribute_name: Name for new attribute.
              If None (default) the name of source is used

    Output
        I: Vector data set; points located as target with values
           interpolated from source

    Note:
        If target geometry is polygon, data will be interpolated to
        its centroids and the output is a point data set.
    """

    # Input checks
    verify(source.is_vector)
    verify(target.is_vector)
    verify(source.is_polygon_data)

    if target.is_point_data:
        return interpolate_polygon_points(source, target,
                                          layer_name=layer_name,
                                          attribute_name=attribute_name)

    if target.is_line_data:
        return interpolate_polygon_lines(source, target,
                                         layer_name=layer_name,
                                         attribute_name=attribute_name)

    if target.is_polygon_data:
        # Interpolate to the polygon centroids ...
        centroids = convert_polygons_to_centroids(target)
        interpolated = interpolate_polygon_points(
            source, centroids,
            layer_name=layer_name,
            attribute_name=attribute_name)

        # ... then restore the original polygon geometry so callers
        # get back a layer shaped like the target polygons
        return Vector(data=interpolated.get_data(),
                      projection=interpolated.get_projection(),
                      geometry=centroids.get_geometry(),
                      name=interpolated.get_name())

    msg = ('Unknown datatype for polygon2vector interpolation: '
           'I got %s' % str(target))
    raise InaSAFEError(msg)
def check_inputs(hazard, exposure, layer_name, attribute_name):
    """Check inputs and establish default values

    Args:
        * hazard: Hazard layer instance (any type)
        * exposure: Exposure layer instance (any type)
        * layer_name: Name of returned layer or None
        * attribute_name: Name of interpolated attribute or None

    Returns:
        * layer_name
        * attribute_name

    Raises:
        VerificationError
    """

    verify(hazard.projection == exposure.projection,
           "Projections must be the same: I got %s and %s"
           % (hazard.projection, exposure.projection))

    exposure_type = (str(type(exposure)))[1:-1]

    verify(attribute_name is None
           or isinstance(attribute_name, basestring),
           "Parameter attribute_name must be either a string or None. "
           "I got %s" % exposure_type)

    verify(layer_name is None or isinstance(layer_name, basestring),
           "Parameter layer_name must be either a string or None. "
           "I got %s" % exposure_type)

    # Establish default names
    if layer_name is None:
        layer_name = exposure.get_name()

    # NOTE(review): this overwrites layer_name (even a caller-supplied
    # one) rather than setting attribute_name; looks like a possible
    # copy-paste slip -- confirm against callers before changing.
    if hazard.is_raster and attribute_name is None:
        layer_name = hazard.get_name()

    return layer_name, attribute_name
def get_topN(self, attribute, N=10):
    """Get top N features

    Args:
        * attribute: The name of attribute where values are sought
        * N: How many

    Returns:
        * layer: New vector layer with selected features

    Raises:
        VerificationError on invalid attribute or non-positive N.
    """

    # Input checks
    # BUG FIX: corrected typo 'Specfied' in the error message.
    msg = ("Specified attribute must be a string. "
           "I got %s" % (type(attribute)))
    verify(isinstance(attribute, basestring), msg)

    msg = "Specified attribute was empty"
    verify(attribute != "", msg)

    msg = "N must be a positive number. I got %i" % N
    verify(N > 0, msg)

    # Create list of values for specified attribute
    values = self.get_data(attribute)

    # Sort and select using Schwarzian transform
    A = zip(values, self.data, self.geometry)
    A.sort()

    # Pick top N and unpack
    data, geometry = zip(*A[-N:])[1:]

    # Create new Vector instance and return
    return Vector(data=data,
                  projection=self.get_projection(),
                  geometry=geometry,
                  keywords=self.get_keywords())
def get_bins(self, N=10, quantiles=False):
    """Get N values between the min and the max occurred in this dataset.

    Returns a sorted list of length N+1 where the first element is the
    minimum and the last is the maximum. The intermediate values depend
    on the keyword quantiles:
        If quantiles is True, they are boundaries between quantiles.
        If quantiles is False, they are equidistant interval boundaries.
    """
    rmin, rmax = self.get_extrema()

    if quantiles is False:
        # Equidistant intervals over [rmin, rmax)
        step = (rmax - rmin) / N
        levels = [rmin + k * step for k in range(N)]
    else:
        # Quantiles
        # FIXME (Ole): Not 100% sure about this algorithm,
        # but it is close enough
        values = self.get_data().flat[:]

        # Omit NaN's before ranking
        values = values.compress(numpy.logical_not(numpy.isnan(values)))
        values.sort()
        verify(len(values) == values.shape[0])

        stride = float(len(values) + 0.5) / N
        levels = [values[int(k * stride)] for k in range(N)]

    # Close the list with the overall maximum
    levels.append(rmax)
    return levels
def convert_line_to_points(V, delta):
    """Convert line vector data to point vector data

    :param V: Vector layer with line data
    :type V: Vector

    :param delta: Incremental step to find the points
    :type delta: float

    :returns: Vector layer with point data and the same attributes as V
    :rtype: Vector
    """
    msg = 'Input data %s must be line vector data' % V
    verify(V.is_line_data, msg)

    geometry = V.get_geometry()
    data = V.get_data()

    # Sample each line at interval delta, replicating the line's
    # attribute dictionary for every generated point so the 1:1
    # pairing between data and geometry entries is preserved
    points = []
    new_data = []
    # FIXME (Ole): What on earth is this?
    # pylint: disable=W0621
    for line, attributes in zip(geometry, data):
        vertices = points_along_line(line, delta)
        new_data.extend(attributes for _ in vertices)
        points.extend(vertices)
    # pylint: enable=W0621

    # Create new point vector layer with same attributes and return
    return Vector(data=new_data,
                  projection=V.get_projection(),
                  geometry=points,
                  name='%s_point_data' % V.get_name(),
                  keywords=V.get_keywords())
def data(self):
    """Property for the data of this layer.

    The setter does a lazy read so that the data matrix is only
    initialised if is is actually wanted.

    :returns: A matrix containing the layer data or None if the layer
        has no band.
    :rtype: numpy.array
    """
    if self._data is None:
        if self.band is None:
            return None

        # Read from raster file
        data = self.band.ReadAsArray()

        # Convert to double precision (issue #75)
        data = numpy.array(data, dtype=numpy.float64)

        # Self check
        M, N = data.shape
        msg = (
            'Dimensions of raster array do not match those of '
            'raster file %s' % self.filename)
        verify(M == self.rows, msg)
        verify(N == self.columns, msg)
        nodata = self.band.GetNoDataValue()
        if nodata is None:
            nodata = -9999

        # BUG FIX: the original guard was 'nodata is not numpy.nan',
        # an identity comparison which is (almost) always True because
        # GDAL returns a fresh float object. Use numpy.isnan: when
        # nodata already is NaN, 'data == nodata' can never match so
        # the masking would be a pointless full-array pass.
        if not numpy.isnan(nodata):
            # Replace nodata cells with NaN so downstream numpy code
            # can ignore them uniformly
            NaN = numpy.ones((M, N), numpy.float64) * numpy.nan
            data = numpy.where(data == nodata, NaN, data)
        self._data = data

    return self._data
def bbox_intersection(*args):
    """Compute intersection between two or more bounding boxes

    Args:
        * args: two or more bounding boxes. Each is assumed to be a
          list or a tuple with four coordinates (W, S, E, N)

    Returns:
        * result: The minimal common bounding box, or None if the
          intersection is empty.

    Raises:
        Exception if an argument cannot be interpreted as a bounding box.
    """
    msg = "Function bbox_intersection must take at least 2 arguments."
    verify(len(args) > 1, msg)

    # Start from the whole geographic extent and shrink it
    result = [-180, -90, 180, 90]
    for a in args:
        msg = ("Bounding box expected to be a list of the "
               "form [W, S, E, N]. "
               'Instead i got "%s"' % str(a))

        try:
            box = list(a)
        except TypeError:
            # BUG FIX: a bare 'except:' also swallowed SystemExit and
            # KeyboardInterrupt; list() raises TypeError for
            # non-iterable input, which is the case we mean to report.
            raise Exception(msg)

        verify(len(box) == 4, msg)

        msg = "Western boundary must be less than eastern. I got %s" % box
        verify(box[0] < box[2], msg)

        msg = "Southern boundary must be less than northern. I got %s" % box
        verify(box[1] < box[3], msg)

        # Compute intersection

        # West and South
        for i in [0, 1]:
            result[i] = max(result[i], box[i])

        # East and North
        for i in [2, 3]:
            result[i] = min(result[i], box[i])

    # Check validity and return
    if result[0] < result[2] and result[1] < result[3]:
        return result
    else:
        return None
def geotransform2resolution(geotransform, isotropic=False,
                            rtol=1.0e-6, atol=1.0e-8):
    """Convert geotransform to resolution

    Args:
        * geotransform: GDAL geotransform (6-tuple).
          (top left x, w-e pixel resolution, rotation,
           top left y, rotation, n-s pixel resolution).
          See e.g. http://www.gdal.org/gdal_tutorial.html
        * isotropic: If True, verify that dx == dy and return dx
          If False (default) return 2-tuple (dx, dy)
        * rtol, atol: Used to control how close dx and dy must be
          to quality for isotropic. These are passed on to
          numpy.allclose for comparison.

    Returns:
        * resolution: grid spacing (resx, resy) in (positive) decimal
          degrees ordered as longitude first, then latitude.
          or resx (if isotropic is True)
    """
    resx = geotransform[1]     # w-e pixel resolution
    resy = -geotransform[5]    # n-s pixel resolution (stored negative)

    if not isotropic:
        return resx, resy

    # Caller asked for a single spacing - the two axes must agree
    msg = ('Resolution requested with '
           'isotropic=True, but '
           'resolutions in the horizontal and vertical '
           'are different: resx = %.12f, resy = %.12f. ' % (resx, resy))
    verify(numpy.allclose(resx, resy, rtol=rtol, atol=atol), msg)
    return resx
def rings_equal(x, y, rtol=1.0e-6, atol=1.0e-8):
    """Compares to linear rings as numpy arrays

    :param x: A 2d array of the first ring
    :type x: numpy.ndarray

    :param y: A 2d array of the second ring
    :type y: numpy.ndarray

    :param rtol: The relative tolerance parameter
    :type rtol: float

    :param atol: The relative tolerance parameter
    :type rtol: float

    Returns:
       * True if x == y or x' == y (up to the specified tolerance)
         where x' is x reversed in the first dimension. This corresponds
         to linear rings being seen as equal irrespective of whether
         they are organised in clock wise or counter clock wise order
    """
    # BUG FIX: numpy.float was a deprecated alias of the builtin float
    # (deprecated in NumPy 1.20, removed in 1.24); use float directly.
    x = ensure_numeric(x, float)
    y = ensure_numeric(y, float)

    msg = "Arrays must a 2d arrays of vertices. I got %s and %s" % (x, y)
    verify(len(x.shape) == 2 and len(y.shape) == 2, msg)

    msg = "Arrays must have two columns. I got %s and %s" % (x, y)
    verify(x.shape[1] == 2 and y.shape[1] == 2, msg)

    # Equal either directly or with one ring reversed (orientation
    # independent comparison)
    if numpy.allclose(x, y, rtol=rtol, atol=atol) or \
       numpy.allclose(x, y[::-1], rtol=rtol, atol=atol):
        return True
    else:
        return False
def interpolate_polygon_raster(source, target,
                               layer_name=None, attribute_name=None):
    """Interpolate from polygon layer to raster data

    Args
        * source: Polygon data set
        * target: Raster data set
        * layer_name: Optional name of returned interpolated layer.
              If None the name of source is used for the returned layer.
        * attribute_name: Name for new attribute.
              If None (default) the name of layer target is used

    Output
        I: Vector data set; points located as target with values
           interpolated from source

    Note:
        Each point in the resulting dataset will have an attribute
        'polygon_id' which refers to the polygon it belongs to.
    """
    # Input checks
    verify(target.is_raster)
    verify(source.is_vector)
    verify(source.is_polygon_data)

    # Run underlying clipping algorithm
    polygon_geometry = source.get_geometry(as_geometry_objects=True)
    polygon_attributes = source.get_data()
    clipped = clip_grid_by_polygons(target.get_data(scaling=False),
                                    target.get_geotransform(),
                                    polygon_geometry)

    # Create one new point layer with interpolated attributes
    new_geometry = []
    new_attributes = []
    for polygon_id, (points, values) in enumerate(clipped):
        # Assign attributes to every grid point falling in this polygon
        base_attributes = polygon_attributes[polygon_id]
        for j, point in enumerate(points):
            attributes = base_attributes.copy()
            attributes[attribute_name] = values[j]
            attributes['polygon_id'] = polygon_id
            new_attributes.append(attributes)
            new_geometry.append(point)

    return Vector(data=new_attributes,
                  projection=source.get_projection(),
                  geometry=new_geometry,
                  name=layer_name)
def interpolate_polygon_raster(source, target,
                               layer_name=None, attribute_name=None):
    """Interpolate from polygon layer to raster data

    Args
        * source: Polygon data set
        * target: Raster data set
        * layer_name: Optional name of returned interpolated layer.
              If None the name of source is used for the returned layer.
        * attribute_name: Name for new attribute.
              If None (default) the name of layer target is used

    Output
        I: Vector data set; points located as target with values
           interpolated from source

    Note:
        Each point in the resulting dataset will have an attribute
        'polygon_id' which refers to the polygon it belongs to.
    """
    # Input checks
    verify(target.is_raster)
    verify(source.is_vector)
    verify(source.is_polygon_data)

    # Run underlying clipping algorithm
    grid_data = target.get_data(scaling=False)
    geotransform = target.get_geotransform()
    polygons = source.get_geometry(as_geometry_objects=True)
    attributes = source.get_data()
    res = clip_grid_by_polygons(grid_data, geotransform, polygons)

    # Build one point layer carrying the interpolated attributes
    geometry_out = []
    attributes_out = []
    polygon_id = 0
    for points, values in res:
        for index, point in enumerate(points):
            # Copy the owning polygon's attributes and attach the grid
            # cell value plus a back reference to the polygon
            record = attributes[polygon_id].copy()
            record[attribute_name] = values[index]
            record['polygon_id'] = polygon_id
            attributes_out.append(record)
            geometry_out.append(point)
        polygon_id += 1

    return Vector(data=attributes_out,
                  projection=source.get_projection(),
                  geometry=geometry_out,
                  name=layer_name)
def check_inputs(hazard, exposure, layer_name, attribute_name):
    """Check inputs and establish default values

    Args:
        * hazard: Hazard layer instance (any type)
        * exposure: Exposure layer instance (any type)
        * layer_name: Name of returned layer or None
        * attribute_name: Name of interpolated attribute or None

    Returns:
        * layer_name: resolved name for the returned layer
        * attribute_name: resolved attribute name, truncated to 10
          characters for shp compatibility (or None)

    Raises:
        VerificationError
    """

    # Interpolation only makes sense when both layers share a projection
    msg = ('Projections must be the same: I got %s and %s'
           % (hazard.projection, exposure.projection))
    verify(hazard.projection == exposure.projection, msg)

    msg = ('Parameter attribute_name must be either a string or None. '
           'I got %s' % (str(type(exposure)))[1:-1])
    verify(attribute_name is None
           or isinstance(attribute_name, basestring), msg)

    msg = ('Parameter layer_name must be either a string or None. '
           'I got %s' % (str(type(exposure)))[1:-1])
    verify(layer_name is None
           or isinstance(layer_name, basestring), msg)

    # Establish default names
    if layer_name is None:
        layer_name = exposure.get_name()

    # NOTE(review): this assigns layer_name, overwriting even a
    # caller-supplied value, while the similar raster/point branch below
    # assigns attribute_name. Looks like a possible copy-paste slip --
    # confirm against callers before changing.
    if hazard.is_raster and attribute_name is None:
        layer_name = hazard.get_name()

    # Raster exposure sampled by polygon hazard: name the new attribute
    # after the exposure layer
    if (exposure.is_raster and hazard.is_vector and hazard.is_polygon_data
            and attribute_name is None):
        attribute_name = exposure.get_name()

    # Point exposure sampled from raster hazard: name the new attribute
    # after the hazard layer
    if (hazard.is_raster and exposure.is_vector and exposure.is_point_data
            and attribute_name is None):
        attribute_name = hazard.get_name()

    # Launder for shape files
    # FIXME (Ole): Remove when (if) we get rid of the shp format
    # (the shp driver truncates attribute names to 10 characters)
    if attribute_name is not None:
        attribute_name = str(attribute_name[:10])

    return layer_name, attribute_name
def get_data(self, attribute=None, index=None, copy=False):
    """Get vector attributes

    Note:
        Data is returned as a list where each entry is a dictionary
        of attributes for one feature. Entries in get_geometry() and
        get_data() are related as 1-to-1

        If optional argument attribute is specified and a valid name,
        then the list of values for that attribute is returned.

        If optional argument index is specified on the that value will
        be returned. Any value of index is ignored if attribute is None.

        If optional argument copy is True and all attributes are
        requested, a copy will be returned. Otherwise a pointer to the
        data is returned.
    """
    if not hasattr(self, "data"):
        raise GetDataError("Vector data instance does not have any attributes")

    if attribute is None:
        # All attributes requested; deep copy only on demand
        return copy_module.deepcopy(self.data) if copy else self.data

    msg = (
        "Specified attribute %s does not exist in "
        "vector layer %s. Valid names are %s"
        % (attribute, self, self.data[0].keys())
    )
    verify(attribute in self.data[0], msg)

    if index is None:
        # Return all values for specified attribute
        return [feature[attribute] for feature in self.data]

    # Return value for specified attribute and index
    msg = "Specified index must be either None or " "an integer. I got %s" % index
    verify(isinstance(index, int), msg)

    msg = (
        "Specified index must lie within the bounds "
        "of vector layer %s which is [%i, %i]"
        % (self, 0, len(self) - 1)
    )
    verify(0 <= index < len(self), msg)

    return self.data[index][attribute]
def get_data(self, attribute=None, index=None, copy=False):
    """Get vector attributes

    Note:
        Data is returned as a list where each entry is a dictionary
        of attributes for one feature. Entries in get_geometry() and
        get_data() are related as 1-to-1

        If optional argument attribute is specified and a valid name,
        then the list of values for that attribute is returned.

        If optional argument index is specified on the that value will
        be returned. Any value of index is ignored if attribute is None.

        If optional argument copy is True and all attributes are
        requested, a copy will be returned. Otherwise a pointer to the
        data is returned.

    Raises:
        GetDataError if the layer has no attribute data.
    """
    if hasattr(self, 'data'):
        if attribute is None:
            if copy:
                return copy_module.deepcopy(self.data)
            else:
                return self.data
        else:
            msg = ('Specified attribute %s does not exist in '
                   'vector layer %s. Valid names are %s'
                   '' % (attribute, self, self.data[0].keys()))
            verify(attribute in self.data[0], msg)

            if index is None:
                # Return all values for specified attribute
                return [x[attribute] for x in self.data]
            else:
                # Return value for specified attribute and index
                msg = ('Specified index must be either None or '
                       'an integer. I got %s' % index)
                # IDIOM FIX: replaced 'type(index) == type(0)' with
                # isinstance, which also accepts int subclasses.
                verify(isinstance(index, int), msg)

                msg = ('Specified index must lie within the bounds '
                       'of vector layer %s which is [%i, %i]'
                       '' % (self, 0, len(self) - 1))
                verify(0 <= index < len(self), msg)

                return self.data[index][attribute]
    else:
        msg = 'Vector data instance does not have any attributes'
        raise GetDataError(msg)
def __init__(self, name=None, projection=None,
             keywords=None, style_info=None, sublayer=None):
    """Common constructor for all types of layers

    See docstrings for class Raster and class Vector for details.
    """
    # Name must be textual or absent
    msg = ('Specified name must be a string or None. '
           'I got %s with type %s' % (name, str(type(name))[1:-1]))
    verify(isinstance(name, basestring) or name is None, msg)
    self.name = name

    # Projection is always wrapped in a Projection object
    self.projection = Projection(projection)

    # Keywords default to an empty dictionary
    if keywords is not None:
        msg = ('Specified keywords must be either None or a '
               'dictionary. I got %s' % keywords)
        verify(isinstance(keywords, dict), msg)
    self.keywords = {} if keywords is None else keywords

    # Style info defaults to an empty dictionary
    if style_info is not None:
        msg = ('Specified style_info must be either None or a '
               'dictionary. I got %s' % style_info)
        verify(isinstance(style_info, dict), msg)
    self.style_info = {} if style_info is None else style_info

    # Defaults
    self.sublayer = sublayer
    self.filename = None
    self.data = None
def get_topN(self, attribute, N=10):
    """Get top N features

    :param attribute: The name of attribute where values are sought
    :type attribute: str

    :param N: How many
    :type N: int

    :returns: New vector layer with selected features
    """

    # Input checks
    # BUG FIX: corrected typo 'Specfied' in the error message.
    msg = ('Specified attribute must be a string. '
           'I got %s' % (type(attribute)))
    verify(isinstance(attribute, basestring), msg)

    msg = 'Specified attribute was empty'
    verify(attribute != '', msg)

    msg = 'N must be a positive number. I got %i' % N
    verify(N > 0, msg)

    # Create list of values for specified attribute
    values = self.get_data(attribute)

    # Sort and select using Schwarzian transform
    A = zip(values, self.data, self.geometry)
    A.sort()

    # Pick top N and unpack
    data, geometry = zip(*A[-N:])[1:]

    # Create new Vector instance and return
    return Vector(data=data,
                  projection=self.get_projection(),
                  geometry=geometry,
                  keywords=self.get_keywords())
def check_inputs(hazard, exposure, layer_name, attribute_name):
    """Check inputs and establish default values

    Args:
        * hazard: Hazard layer instance (any type)
        * exposure: Exposure layer instance (any type)
        * layer_name: Name of returned layer or None
        * attribute_name: Name of interpolated attribute or None

    Returns:
        * layer_name
        * attribute_name

    Raises:
        VerificationError
    """

    projection_msg = ('Projections must be the same: I got %s and %s'
                      % (hazard.projection, exposure.projection))
    verify(hazard.projection == exposure.projection, projection_msg)

    exposure_type = (str(type(exposure)))[1:-1]

    attribute_msg = ('Parameter attribute_name must be either a string '
                     'or None. I got %s' % exposure_type)
    verify(attribute_name is None or
           isinstance(attribute_name, basestring), attribute_msg)

    layer_msg = ('Parameter layer_name must be either a string or None. '
                 'I got %s' % exposure_type)
    verify(layer_name is None or
           isinstance(layer_name, basestring), layer_msg)

    # Establish default names
    layer_name = exposure.get_name() if layer_name is None else layer_name

    # NOTE(review): this overwrites layer_name rather than setting
    # attribute_name; possibly a copy-paste slip -- confirm against
    # callers before changing.
    if hazard.is_raster and attribute_name is None:
        layer_name = hazard.get_name()

    return layer_name, attribute_name
def write_to_file(self, filename, sublayer=None):
    """Save vector data to file

    Args:
        * filename: filename with extension .shp or .gml
        * sublayer: Optional string for writing a sublayer. Ignored
              unless we are writing to an sqlite file.

    Note:
        Shp limitation, if attribute names are longer than 10
        characters they will be truncated. This is due to limitations
        in the shp file driver and has to be done here since gdal v1.7
        onwards has changed its handling of this issue:
        http://www.gdal.org/ogr/drv_shapefile.html

        **For this reason we recommend writing to spatialite.**
    """

    # Check file format
    basename, extension = os.path.splitext(filename)

    msg = ('Invalid file type for file %s. Only extensions '
           'sqlite, shp or gml allowed.' % filename)
    verify(extension in ['.sqlite', '.shp', '.gml'], msg)
    driver = DRIVER_MAP[extension]

    # FIXME (Ole): Tempory flagging of GML issue (ticket #18)
    if extension == '.gml':
        msg = ('OGR GML driver does not store geospatial reference.'
               'This format is disabled for the time being. See '
               'https://github.com/AIFDR/riab/issues/18')
        raise WriteLayerError(msg)

    # Derive layername from filename (excluding preceding dirs)
    if sublayer is None or extension == '.shp':
        layername = os.path.split(basename)[-1]
    else:
        layername = sublayer

    # Get vector data
    # Polygons are fetched as geometry objects so that inner/outer
    # rings are available below; other types come as plain arrays
    if self.is_polygon_data:
        geometry = self.get_geometry(as_geometry_objects=True)
    else:
        geometry = self.get_geometry()
    data = self.get_data()
    N = len(geometry)

    # Clear any previous file of this name (ogr does not overwrite)
    try:
        os.remove(filename)
    except OSError:
        pass

    # Create new file with one layer
    drv = ogr.GetDriverByName(driver)
    if drv is None:
        msg = 'OGR driver %s not available' % driver
        raise WriteLayerError(msg)

    ds = drv.CreateDataSource(filename)
    if ds is None:
        msg = 'Creation of output file %s failed' % filename
        raise WriteLayerError(msg)

    lyr = ds.CreateLayer(layername,
                         self.projection.spatial_reference,
                         self.geometry_type)
    if lyr is None:
        msg = 'Could not create layer %s' % layername
        raise WriteLayerError(msg)

    # Define attributes if any
    store_attributes = False
    fields = []
    if data is not None:
        if len(data) > 0:
            # NOTE(review): bare 'except:' here also swallows
            # KeyboardInterrupt/SystemExit - consider narrowing.
            try:
                fields = data[0].keys()
            except:
                msg = ('Input parameter "attributes" was specified '
                       'but it does not contain list of dictionaries '
                       'with field information as expected. The first '
                       'element is %s' % data[0])
                raise WriteLayerError(msg)
            else:
                # Establish OGR types for each element
                # (derived from the first feature's attribute values -
                # assumes all features share the same schema)
                ogrtypes = {}
                for name in fields:
                    att = data[0][name]
                    py_type = type(att)
                    msg = ('Unknown type for storing vector '
                           'data: %s, %s' % (name, str(py_type)[1:-1]))
                    verify(py_type in TYPE_MAP, msg)
                    ogrtypes[name] = TYPE_MAP[py_type]
        else:
            #msg = ('Input parameter "data" was specified '
            #       'but appears to be empty')
            #raise InaSAFEError(msg)
            pass

        # Create attribute fields in layer
        store_attributes = True
        for name in fields:
            fd = ogr.FieldDefn(name, ogrtypes[name])
            # FIXME (Ole): Trying to address issue #16
            #              But it doesn't work and
            #              somehow changes the values of MMI in test
            #width = max(128, len(name))
            #print name, width
            #fd.SetWidth(width)

            # Silent handling of warnings like
            # Warning 6: Normalized/laundered field name:
            #'CONTENTS_LOSS_AUD' to 'CONTENTS_L'
            gdal.PushErrorHandler('CPLQuietErrorHandler')
            if lyr.CreateField(fd) != 0:
                msg = 'Could not create field %s' % name
                raise WriteLayerError(msg)

            # Restore error handler
            gdal.PopErrorHandler()

    # Store geometry
    # NOTE: for point data this single Geometry object is reused for
    # every feature; line and polygon branches rebind 'geom' per feature
    geom = ogr.Geometry(self.geometry_type)
    layer_def = lyr.GetLayerDefn()
    for i in range(N):
        # Create new feature instance
        feature = ogr.Feature(layer_def)

        # Store geometry and check
        if self.is_point_data:
            x = float(geometry[i][0])
            y = float(geometry[i][1])
            geom.SetPoint_2D(0, x, y)
        elif self.is_line_data:
            geom = array2line(geometry[i],
                              geometry_type=ogr.wkbLineString)
        elif self.is_polygon_data:
            # Create polygon geometry
            geom = ogr.Geometry(ogr.wkbPolygon)

            # Add outer ring
            linear_ring = array2line(geometry[i].outer_ring,
                                     geometry_type=ogr.wkbLinearRing)
            geom.AddGeometry(linear_ring)

            # Add inner rings if any
            for A in geometry[i].inner_rings:
                geom.AddGeometry(array2line(
                    A, geometry_type=ogr.wkbLinearRing))
        else:
            msg = 'Geometry type %s not implemented' % self.geometry_type
            raise WriteLayerError(msg)

        feature.SetGeometry(geom)

        G = feature.GetGeometryRef()
        if G is None:
            msg = 'Could not create GeometryRef for file %s' % filename
            raise WriteLayerError(msg)

        # Store attributes
        if store_attributes:
            for j, name in enumerate(fields):
                # Use the laundered (possibly truncated) field name as
                # actually created in the layer, not the original key
                actual_field_name = layer_def.GetFieldDefn(j).GetNameRef()

                val = data[i][name]

                if type(val) == numpy.ndarray:
                    # A singleton of type <type 'numpy.ndarray'> works
                    # for gdal version 1.6 but fails for version 1.8
                    # in SetField with error: NotImplementedError:
                    # Wrong number of arguments for overloaded function
                    val = float(val)
                elif val is None:
                    val = ''

                # We do this because there is NaN problem on windows
                # NaN value must be converted to _pseudo_in to solve the
                # problem. But, when InaSAFE read the file, it'll be
                # converted back to NaN value, so that NaN in InaSAFE is
                # a numpy.nan
                # please check https://github.com/AIFDR/inasafe/issues/269
                # for more information
                # ('val != val' is the classic NaN self-inequality test)
                if val != val:
                    val = _pseudo_inf

                feature.SetField(actual_field_name, val)

        # Save this feature
        if lyr.CreateFeature(feature) != 0:
            msg = 'Failed to create feature %i in file %s' % (i, filename)
            raise WriteLayerError(msg)

        feature.Destroy()

    # Write keywords if any
    write_keywords(self.keywords, basename + '.keywords')
def osm2bnpb(E, target_attribute='VCLASS'):
    """Map OSM attributes to BNPB vulnerability classes

    This maps attributes collected in the OpenStreetMap exposure data
    (data.kompetisiosm.org) to 2 vulnerability classes identified by
    BNPB in Kajian Risiko Gempabumi VERS 1.0, 2011. They are
    URM: Unreinforced Masonry and RM: Reinforced Masonry

    Input
        E: Vector object representing the OSM data
        target_attribute: Optional name of the attribute containing
                          the mapped vulnerability class. Default
                          value is 'VCLASS'

    Output:
        Vector object like E, but with one new attribute (e.g. 'VCLASS')
        representing the vulnerability class used in the guidelines
    """

    # Input check
    required = ['building_l', 'building_s']
    actual = E.get_attribute_names()
    msg = ('Input data to osm2bnpb must have attributes %s. '
           'It has %s' % (str(required), str(actual)))
    for attribute in required:
        verify(attribute in actual, msg)

    # Start mapping
    N = len(E)
    attributes = E.get_data()

    # FIXME (Ole): Pylint says variable count is unused. Why?
    # pylint: disable=W0612
    count = 0
    # pylint: enable=W0612
    for i in range(N):
        # levels: number of storeys; structure: construction type
        # NOTE(review): levels is assumed to be a string here (endswith
        # is called on it) -- confirm against the data source.
        levels = E.get_data('building_l', i)
        structure = E.get_data('building_s', i)
        if levels is None or structure is None:
            # Missing data defaults to the more vulnerable class
            vulnerability_class = 'URM'
            count += 1
        else:
            # Map string variable levels to integer
            if levels.endswith('+'):
                # Open-ended counts like '8+' are treated as very high
                levels = 100
            try:
                levels = int(levels)
            except ValueError:
                # E.g. 'ILP jalan' - unparsable level strings default
                # to the more vulnerable class
                vulnerability_class = 'URM'
                count += 1
            else:
                # Start mapping depending on levels
                if levels >= 4:
                    # High
                    vulnerability_class = 'RM'
                elif 1 <= levels < 4:
                    # Low - class depends on construction material
                    if structure in ['reinforced_masonry',
                                     'confined_masonry']:
                        vulnerability_class = 'RM'
                    elif 'kayu' in structure or 'wood' in structure:
                        # 'kayu' is Indonesian for wood
                        vulnerability_class = 'RM'
                    else:
                        vulnerability_class = 'URM'
                elif numpy.allclose(levels, 0):
                    # A few buildings exist with 0 levels.

                    # In general, we should be assigning here the most
                    # frequent building in the area which could be defined
                    # by admin boundaries.
                    vulnerability_class = 'URM'
                else:
                    # Negative level counts are rejected outright
                    msg = 'Unknown number of levels: %s' % levels
                    raise Exception(msg)

        # Store new attribute value
        attributes[i][target_attribute] = vulnerability_class

    # print 'Got %i without levels or structure (out of %i total)' % (count, N)

    # Create new vector instance and return
    V = Vector(data=attributes,
               projection=E.get_projection(),
               geometry=E.get_geometry(),
               name=E.get_name() + ' mapped to BNPB vulnerability classes',
               keywords=E.get_keywords())
    return V