def convert_line_to_points(V, delta):
    """Convert line vector data to point vector data

    Input
        V: Vector layer with line data
        delta: Incremental step to find the points
    Output
        Vector layer with point data and the same attributes as V
    """

    msg = 'Input data %s must be line vector data' % V
    verify(V.is_line_data, msg)

    geometry = V.get_geometry()
    data = V.get_data()
    N = len(V)

    # Calculate points along each line
    points = []
    new_data = []
    for i in range(N):
        c = points_along_line(geometry[i], delta)

        # We need to create a data entry for each point.
        new_data.extend([data[i] for _ in c])
        points.extend(c)

    # Create new point vector layer with same attributes and return
    V = Vector(data=new_data,
               projection=V.get_projection(),
               geometry=points,
               name='%s_point_data' % V.get_name(),
               keywords=V.get_keywords())
    return V
def grid_to_points(A, x, y):
    """Convert grid data to point data

    :param A: Array of pixel values
    :type A: numpy.ndarray

    :param x: Longitudes corresponding to columns in A (west->east)
    :type x: numpy.ndarray

    :param y: Latitudes corresponding to rows in A (south->north)
    :type y: numpy.ndarray

    Returns:
        * P: Nx2 array of point coordinates
        * V: N array of point values
    """

    msg = ('Longitudes must be increasing (west to east). I got %s'
           % str(x))
    verify(x[0] < x[1], msg)

    msg = ('Latitudes must be increasing (south to north). I got %s'
           % str(y))
    verify(y[0] < y[1], msg)

    # Create Nx2 array of x, y points corresponding to each
    # element in A.
    points = axes_to_points(x, y)

    # Create flat 1D row-major view of A as one vector of
    # length M*N where M, N = A.shape
    values = A.reshape(-1)

    # Return Nx2 array of point coordinates and N array of values
    return points, values
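# Minimal usage sketch for grid_to_points (illustrative only): assumes numpy
# and the axes_to_points helper defined in this module; the grid values and
# coordinates below are made up.
def _example_grid_to_points():
    A = numpy.array([[1., 2., 3.],
                     [4., 5., 6.]])          # 2 rows (lat) x 3 columns (lon)
    x = numpy.array([100.5, 101.5, 102.5])   # longitudes, west -> east
    y = numpy.array([-1.5, -0.5])            # latitudes, south -> north
    P, V = grid_to_points(A, x, y)
    assert P.shape == (6, 2) and len(V) == 6
    return P, V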
def convert_polygons_to_centroids(V):
    """Convert polygon vector data to point vector data

    :param V: Vector layer with polygon data
    :type V: Vector

    :returns: Vector layer with point data and the same attributes as V
    :rtype: Vector
    """

    msg = 'Input data %s must be polygon vector data' % V
    verify(V.is_polygon_data, msg)

    geometry = V.get_geometry()
    N = len(V)

    # Calculate centroid for each polygon
    centroids = []
    for i in range(N):
        c = calculate_polygon_centroid(geometry[i])
        centroids.append(c)

    # Create new point vector layer with same attributes and return
    V = Vector(data=V.get_data(),
               projection=V.get_projection(),
               geometry=centroids,
               name='%s_centroid_data' % V.get_name(),
               keywords=V.get_keywords())
    return V
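# Hedged sketch of the area-weighted (shoelace) centroid formula that a
# helper like calculate_polygon_centroid is assumed to compute; the actual
# helper in this package may differ. P is an Nx2 array of ring vertices.
def _centroid_sketch(P):
    P = numpy.asarray(P, dtype=float)
    x, y = P[:, 0], P[:, 1]
    xn, yn = numpy.roll(x, -1), numpy.roll(y, -1)
    cross = x * yn - xn * y
    area = cross.sum() / 2.0
    cx = ((x + xn) * cross).sum() / (6.0 * area)
    cy = ((y + yn) * cross).sum() / (6.0 * area)
    return numpy.array([cx, cy])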
def bboxstring2list(bbox_string):
    """Convert bounding box string to list

    Input
        bbox_string: String of bounding box coordinates of the form 'W,S,E,N'
    Output
        bbox: List of floating point numbers with format [W, S, E, N]
    """

    msg = ('Bounding box must be a string with coordinates following the '
           'format 105.592,-7.809,110.159,-5.647\n'
           'Instead I got %s of type %s.' % (str(bbox_string),
                                             type(bbox_string)))
    verify(isinstance(bbox_string, basestring), msg)

    fields = bbox_string.split(',')
    msg = ('Bounding box string must have 4 coordinates in the form '
           '"W,S,E,N". I got bbox == "%s"' % bbox_string)
    verify(len(fields) == 4, msg)

    for x in fields:
        try:
            float(x)
        except ValueError, e:
            msg = ('Bounding box %s contained non-numeric entry %s, '
                   'original error was "%s".' % (bbox_string, x, e))
            raise AssertionError(msg)

    # Return as list of floats [W, S, E, N]
    return [float(x) for x in fields]
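# Minimal usage sketch for bboxstring2list, reusing the example coordinates
# from the error message above.
def _example_bboxstring2list():
    bbox = bboxstring2list('105.592,-7.809,110.159,-5.647')
    assert bbox == [105.592, -7.809, 110.159, -5.647]
    return bbox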
def bboxlist2string(bbox, decimals=6):
    """Convert bounding box list to comma separated string

    Input
        bbox: List of coordinates of the form [W, S, E, N]
    Output
        bbox_string: Format 'W,S,E,N' - each will have 6 decimal points
    """

    msg = 'Got string %s, but expected bounding box as a list' % str(bbox)
    verify(not isinstance(bbox, basestring), msg)

    try:
        bbox = list(bbox)
    except:
        msg = 'Could not coerce bbox %s into a list' % str(bbox)
        raise Exception(msg)

    msg = ('Bounding box must have 4 coordinates [W, S, E, N]. '
           'I got %s' % str(bbox))
    verify(len(bbox) == 4, msg)

    for x in bbox:
        try:
            float(x)
        except ValueError, e:
            msg = ('Bounding box %s contained non-numeric entry %s, '
                   'original error was "%s".' % (bbox, x, e))
            raise AssertionError(msg)

    # Format each coordinate with the requested number of decimals and join
    return ','.join([('%%.%if' % decimals) % x for x in bbox])
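# Minimal usage sketch for bboxlist2string, assuming each coordinate is
# formatted with the default six decimals as the docstring indicates.
def _example_bboxlist2string():
    s = bboxlist2string([105.592, -7.809, 110.159, -5.647])
    assert s == '105.592000,-7.809000,110.159000,-5.647000'
    return s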
def interpolate(self, X, name=None):
    """Interpolate values of this raster layer to other layer

    Input
        X: Layer object defining target
        name: Optional name of interpolated layer.
              If name is None, the name of self is used.
    Output
        Y: Layer object with values of this raster layer interpolated to
           geometry of input layer X

    Note: If target geometry is polygon, data will be interpolated to
          its centroids and the output is a point data set.
    """

    if X.is_raster:
        if self.get_geotransform() != X.get_geotransform():
            # Need interpolation between grids
            msg = 'Intergrid interpolation not yet implemented'
            raise Exception(msg)
        else:
            # Rasters are aligned, no need to interpolate
            return self
    else:
        # Interpolate this raster layer to geometry of X
        msg = ('Name must be either a string or None. I got %s'
               % (str(type(name)))[1:-1])
        verify(name is None or isinstance(name, basestring), msg)

        return interpolate_raster_vector(self, X, name)
def geotransform_to_axes(G, nx, ny):
    """Convert geotransform to coordinate axes

    :param G: GDAL geotransform (6-tuple).
        (top left x, w-e pixel resolution, rotation,
         top left y, rotation, n-s pixel resolution).
    :type G: tuple

    :param nx: Number of cells in the w-e direction
    :type nx: int

    :param ny: Number of cells in the n-s direction
    :type ny: int

    :returns: Two vectors (longitudes and latitudes) representing the grid
        defined by the geotransform.

        The values are offset by half a pixel size to correspond to pixel
        registration. I.e. if the grid origin (top left corner) is (100, 10)
        and the resolution is 1 degree in each direction, then the vectors
        will take the form

        longitudes = [100.5, 101.5, ..., 109.5]
        latitudes = [0.5, 1.5, ..., 9.5]
    """

    lon_ul = float(G[0])  # Longitude of upper left corner
    lat_ul = float(G[3])  # Latitude of upper left corner
    dx = float(G[1])      # Longitudinal resolution
    dy = - float(G[5])    # Latitudinal resolution (always(?) negative)

    verify(dx > 0)
    verify(dy > 0)

    # Coordinates of lower left corner
    lon_ll = lon_ul
    lat_ll = lat_ul - ny * dy

    # Coordinates of upper right corner
    lon_ur = lon_ul + nx * dx

    # Define pixel centers along each direction
    # This is to achieve pixel registration rather
    # than gridline registration
    dx2 = dx / 2
    dy2 = dy / 2

    # Define longitudes and latitudes for each axis
    x = numpy.linspace(lon_ll + dx2, lon_ur - dx2, nx)
    y = numpy.linspace(lat_ll + dy2, lat_ul - dy2, ny)

    # Return
    return x, y
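# Usage sketch matching the docstring example: a 10x10 grid of one-degree
# cells with top left corner at (100, 10).
def _example_geotransform_to_axes():
    G = (100.0, 1.0, 0.0, 10.0, 0.0, -1.0)
    x, y = geotransform_to_axes(G, nx=10, ny=10)
    # x == [100.5, 101.5, ..., 109.5] and y == [0.5, 1.5, ..., 9.5]
    return x, y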
def axes_to_points(x, y):
    """Generate all combinations of grid point coordinates from x and y axes

    :param x: x coordinates (array)
    :type x: numpy.ndarray

    :param y: y coordinates (array)
    :type y: numpy.ndarray

    :returns:
        * P: Nx2 array consisting of coordinates for all
             grid points defined by x and y axes. The x coordinate
             will vary the fastest to match the way 2D numpy
             arrays are laid out by default ('C' order). That way,
             the x and y coordinates will match a corresponding
             2D array A when flattened (A.flat[:] or A.reshape(-1))

    Note:
        Example

        x = [1, 2, 3]
        y = [10, 20]

        P = [[1, 10],
             [2, 10],
             [3, 10],
             [1, 20],
             [2, 20],
             [3, 20]]
    """

    # Reverse y coordinates to have them start at bottom of array
    y = numpy.flipud(y)

    # Repeat x coordinates for each y (fastest varying)
    # noinspection PyTypeChecker
    X = numpy.kron(numpy.ones(len(y)), x)

    # Repeat y coordinates for each x (slowest varying)
    Y = numpy.kron(y, numpy.ones(len(x)))

    # Check
    N = len(X)
    verify(len(Y) == N)

    # Create Nx2 array of x and y coordinates
    X = numpy.reshape(X, (N, 1))
    Y = numpy.reshape(Y, (N, 1))
    P = numpy.concatenate((X, Y), axis=1)

    # Return
    return P
def get_geometry(self):
    """Return longitudes and latitudes (the axes) for grid.

    Return two vectors (longitudes and latitudes) corresponding to grid.
    The values are offset by half a pixel size to correspond to pixel
    registration.

    I.e. if the grid origin (top left corner) is (100, 10) and the
    resolution is 1 degree in each direction, then the vectors will take
    the form

    longitudes = [100.5, 101.5, ..., 109.5]
    latitudes = [0.5, 1.5, ..., 9.5]
    """

    # Get parameters for axes
    g = self.get_geotransform()

    lon_ul = g[0]  # Longitude of upper left corner
    lat_ul = g[3]  # Latitude of upper left corner
    dx = g[1]      # Longitudinal resolution
    dy = - g[5]    # Latitudinal resolution (always(?) negative)
    nx = self.columns
    ny = self.rows

    verify(dx > 0)
    verify(dy > 0)

    # Coordinates of lower left corner
    lon_ll = lon_ul
    lat_ll = lat_ul - ny * dy

    # Coordinates of upper right corner
    lon_ur = lon_ul + nx * dx

    # Define pixel centers along each direction
    dy2 = dy / 2
    dx2 = dx / 2

    # Define longitudes and latitudes for each axis
    x = numpy.linspace(lon_ll + dx2, lon_ur - dx2, nx)
    y = numpy.linspace(lat_ll + dy2, lat_ul - dy2, ny)

    # Return
    return x, y
def write_to_file(self, filename):
    """Save raster data to file

    Input
        filename: filename with extension .tif or .asc
    """

    # Check file format
    basename, extension = os.path.splitext(filename)

    msg = ('Invalid file type for file %s. Only extensions '
           'tif and asc allowed.' % filename)
    verify(extension in ['.tif', '.asc'], msg)
    format = DRIVER_MAP[extension]

    # Get raster data
    A = self.get_data()

    # Get Dimensions. Note numpy and Gdal swap order
    N, M = A.shape

    # Create empty file.
    # FIXME (Ole): It appears that this is created as single
    #              precision even though Float64 is specified
    #              - see issue #17
    driver = gdal.GetDriverByName(format)
    fid = driver.Create(filename, M, N, 1, gdal.GDT_Float64)
    if fid is None:
        msg = ('Gdal could not create filename %s using '
               'format %s' % (filename, format))
        raise Exception(msg)

    # Write metadata
    fid.SetProjection(str(self.projection))
    fid.SetGeoTransform(self.geotransform)

    # Write data
    fid.GetRasterBand(1).WriteArray(A)

    # Write keywords if any
    write_keywords(self.keywords, basename + '.keywords')
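# DRIVER_MAP is assumed to associate file extensions with GDAL driver names
# roughly as sketched below ('GTiff' and 'AAIGrid' are real GDAL drivers;
# the exact mapping used by this module is not shown here).
_DRIVER_MAP_SKETCH = {
    '.tif': 'GTiff',    # GeoTIFF
    '.asc': 'AAIGrid',  # Arc/Info ASCII grid
}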
def convert_line_to_points(V, delta):
    """Convert line vector data to point vector data

    :param V: Vector layer with line data
    :type V: Vector

    :param delta: Incremental step to find the points
    :type delta: float

    :returns: Vector layer with point data and the same attributes as V
    :rtype: Vector
    """

    msg = 'Input data %s must be line vector data' % V
    verify(V.is_line_data, msg)

    geometry = V.get_geometry()
    data = V.get_data()
    N = len(V)

    # Calculate points along each line
    points = []
    new_data = []
    for i in range(N):
        c = points_along_line(geometry[i], delta)

        # We need to create a data entry for each point.
        # FIXME (Ole): What on earth is this?
        # pylint: disable=W0621
        new_data.extend([data[i] for _ in c])
        # pylint: enable=W0621
        points.extend(c)

    # Create new point vector layer with same attributes and return
    V = Vector(data=new_data,
               projection=V.get_projection(),
               geometry=points,
               name='%s_point_data' % V.get_name(),
               keywords=V.get_keywords())
    return V
def get_bins(self, N=10, quantiles=False):
    """Get N+1 bin boundaries between the min and max of this dataset.

    Return sorted list of length N+1 where the first element is min and
    the last is max. Intermediate values depend on the keyword quantiles:
    If quantiles is True, they represent boundaries between quantiles.
    If quantiles is False, they represent equidistant interval boundaries.
    """

    min, max = self.get_extrema()

    levels = []
    if quantiles is False:
        # Linear intervals
        d = (max - min) / N

        for i in range(N):
            levels.append(min + i * d)
    else:
        # Quantiles
        # FIXME (Ole): Not 100% sure about this algorithm,
        #              but it is close enough
        A = self.get_data(nan=True).flat[:]

        mask = numpy.logical_not(numpy.isnan(A))  # Omit NaN's
        A = A.compress(mask)

        A.sort()

        verify(len(A) == A.shape[0])

        d = float(len(A) + 0.5) / N
        for i in range(N):
            levels.append(A[int(i * d)])

    levels.append(max)

    return levels
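# Standalone sketch of the equidistant branch above, operating on a plain
# numpy array so no layer instance is needed; returns N+1 boundaries.
def _example_equidistant_bins(values, N=10):
    A = numpy.asarray(values, dtype=float)
    lo, hi = A.min(), A.max()
    d = (hi - lo) / N
    return [lo + i * d for i in range(N)] + [hi]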
def sendVote(self, vote, signature):
    # First verify the signature of the vote
    if not utilities.verify(vote, signature, self.electionBoard.rsa_pub):
        print("Vote is not signed by the authority! Discounting it.")
        return

    '''
    Verify each vote is only for one candidate.
    Verification strategy:
        Randomly permute the vote array
        Send to EM
        EM checks each value is 0 or 1 and the sum is 1
    Also ensure there are a correct number of candidates
    '''
    permute = utilities.permute(vote)
    if not self.electionBoard.checkValidity(permute):
        print("Invalid vote detected!")
        return  # cannot count this vote
    if len(vote) != self.numCandidates:
        print("Invalid vote detected!")
        return  # cannot count this vote

    self.temp_vote = vote
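# Sketch of the validity test described in the comment above: every entry
# must be 0 or 1 and the entries must sum to exactly 1 (one vote for one
# candidate). The election board's checkValidity is assumed to perform an
# equivalent check on the permuted vote.
def _is_single_candidate_vote(vote):
    return all(v in (0, 1) for v in vote) and sum(vote) == 1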
def get_data(self, attribute=None, index=None):
    """Get vector attributes

    Data is returned as a list where each entry is a dictionary of
    attributes for one feature. Entries in get_geometry() and
    get_data() are related as 1-to-1

    If optional argument attribute is specified and a valid name,
    then the list of values for that attribute is returned.

    If optional argument index is also specified, only the value at
    that index will be returned. Any value of index is ignored if
    attribute is None.
    """

    if hasattr(self, 'data'):
        if attribute is None:
            return self.data
        else:
            msg = ('Specified attribute %s does not exist in '
                   'vector layer %s. Valid names are %s'
                   '' % (attribute, self, self.data[0].keys()))
            verify(attribute in self.data[0], msg)

            if index is None:
                # Return all values for specified attribute
                return [x[attribute] for x in self.data]
            else:
                # Return value for specified attribute and index
                msg = ('Specified index must be either None or '
                       'an integer. I got %s' % index)
                verify(type(index) == type(0), msg)

                msg = ('Specified index must lie within the bounds '
                       'of vector layer %s which is [%i, %i]'
                       '' % (self, 0, len(self) - 1))
                verify(0 <= index < len(self), msg)

                return self.data[index][attribute]
    else:
        msg = 'Vector data instance does not have any attributes'
        raise Exception(msg)
def get_topN(self, attribute, N=10):
    """Get top N features

    Input
        attribute: The name of attribute where values are sought
        N: How many

    Output
        layer: New vector layer with selected features
    """

    # FIXME (Ole): Maybe generalise this to arbitrary expressions

    # Input checks
    msg = ('Specified attribute must be a string. '
           'I got %s' % (type(attribute)))
    verify(isinstance(attribute, basestring), msg)

    msg = 'Specified attribute was empty'
    verify(attribute != '', msg)

    msg = 'N must be a positive number. I got %i' % N
    verify(N > 0, msg)

    # Create list of values for specified attribute
    values = self.get_data(attribute)

    # Sort and select using Schwartzian transform
    A = zip(values, self.data, self.geometry)
    A.sort()

    # Pick top N and unpack
    _, data, geometry = zip(*A[-N:])

    # Create new Vector instance and return
    return Vector(data=data,
                  projection=self.get_projection(),
                  geometry=geometry)
def get_topN(self, attribute, N=10):
    """Get top N features

    :param attribute: The name of attribute where values are sought
    :type attribute: str

    :param N: How many
    :type N: int

    :returns: New vector layer with selected features
    """

    # Input checks
    msg = ('Specified attribute must be a string. '
           'I got %s' % (type(attribute)))
    verify(isinstance(attribute, basestring), msg)

    msg = 'Specified attribute was empty'
    verify(attribute != '', msg)

    msg = 'N must be a positive number. I got %i' % N
    verify(N > 0, msg)

    # Create list of values for specified attribute
    values = self.get_data(attribute)

    # Sort and select using Schwartzian transform
    A = zip(values, self.data, self.geometry)
    A.sort()

    # Pick top N and unpack
    data, geometry = zip(*A[-N:])[1:]

    # Create new Vector instance and return
    return Vector(data=data,
                  projection=self.get_projection(),
                  geometry=geometry,
                  keywords=self.get_keywords())
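# Standalone sketch of the Schwartzian transform used above: sort three
# parallel lists by the first one and keep the entries of the other two that
# belong to the N largest values (Python 2 zip semantics assumed).
def _example_top_n(values, data, geometry, N=2):
    A = zip(values, data, geometry)
    A.sort()
    _, top_data, top_geometry = zip(*A[-N:])
    return top_data, top_geometry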
def write_to_file(self, filename, sublayer=None):
    """Save vector data to file

    :param filename: filename with extension .shp or .gml
    :type filename: str

    :param sublayer: Optional parameter for writing a sublayer. Ignored
        unless we are writing to an sqlite file.
    :type sublayer: str

    :raises: WriteLayerError

    Note:
        Shp limitation, if attribute names are longer than 10
        characters they will be truncated. This is due to limitations in
        the shp file driver and has to be done here since gdal v1.7 onwards
        has changed its handling of this issue:
        http://www.gdal.org/ogr/drv_shapefile.html

        **For this reason we recommend writing to spatialite.**
    """

    # Check file format
    base_name, extension = os.path.splitext(filename)

    msg = ('Invalid file type for file %s. Only extensions '
           'sqlite, shp or gml allowed.' % filename)
    verify(extension in ['.sqlite', '.shp', '.gml'], msg)
    driver = DRIVER_MAP[extension]

    # FIXME (Ole): Temporary flagging of GML issue (ticket #18)
    if extension == '.gml':
        msg = ('OGR GML driver does not store geospatial reference. '
               'This format is disabled for the time being. See '
               'https://github.com/AIFDR/riab/issues/18')
        raise WriteLayerError(msg)

    # Derive layer_name from filename (excluding preceding dirs)
    if sublayer is None or extension == '.shp':
        layer_name = os.path.split(base_name)[-1]
    else:
        layer_name = sublayer

    # Get vector data
    if self.is_polygon_data:
        geometry = self.get_geometry(as_geometry_objects=True)
    else:
        geometry = self.get_geometry()
    data = self.get_data()
    N = len(geometry)

    # Clear any previous file of this name (ogr does not overwrite)
    try:
        os.remove(filename)
    except OSError:
        pass

    # Create new file with one layer
    drv = ogr.GetDriverByName(driver)
    if drv is None:
        msg = 'OGR driver %s not available' % driver
        raise WriteLayerError(msg)

    ds = drv.CreateDataSource(get_string(filename))
    if ds is None:
        msg = 'Creation of output file %s failed' % filename
        raise WriteLayerError(msg)

    lyr = ds.CreateLayer(get_string(layer_name),
                         self.projection.spatial_reference,
                         self.geometry_type)
    if lyr is None:
        msg = 'Could not create layer %s' % layer_name
        raise WriteLayerError(msg)

    # Define attributes if any
    store_attributes = False
    fields = []
    if data is not None:
        if len(data) > 0:
            try:
                fields = data[0].keys()
            except:
                msg = ('Input parameter "attributes" was specified '
                       'but it does not contain a list of dictionaries '
                       'with field information as expected. The first '
                       'element is %s' % data[0])
                raise WriteLayerError(msg)
            else:
                # Establish OGR types for each element
                ogr_types = {}
                for name in fields:
                    att = data[0][name]
                    py_type = type(att)
                    msg = ('Unknown type for storing vector '
                           'data: %s, %s' % (name, str(py_type)[1:-1]))
                    verify(py_type in TYPE_MAP, msg)
                    ogr_types[name] = TYPE_MAP[py_type]
        else:
            # msg = ('Input parameter "data" was specified '
            #        'but appears to be empty')
            # raise InaSAFEError(msg)
            pass

        # Create attribute fields in layer
        store_attributes = True
        for name in fields:
            fd = ogr.FieldDefn(name, ogr_types[name])
            # FIXME (Ole): Trying to address issue #16
            #              But it doesn't work and
            #              somehow changes the values of MMI in test
            # width = max(128, len(name))
            # print name, width
            # fd.SetWidth(width)

            # Silent handling of warnings like
            # Warning 6: Normalized/laundered field name:
            # 'CONTENTS_LOSS_AUD' to 'CONTENTS_L'
            gdal.PushErrorHandler('CPLQuietErrorHandler')
            if lyr.CreateField(fd) != 0:
                msg = 'Could not create field %s' % name
                raise WriteLayerError(msg)

            # Restore error handler
            gdal.PopErrorHandler()

    # Store geometry
    geom = ogr.Geometry(self.geometry_type)
    layer_def = lyr.GetLayerDefn()
    for i in range(N):
        # Create new feature instance
        feature = ogr.Feature(layer_def)

        # Store geometry and check
        if self.is_point_data:
            x = float(geometry[i][0])
            y = float(geometry[i][1])
            geom.SetPoint_2D(0, x, y)
        elif self.is_line_data:
            geom = array_to_line(
                geometry[i], geometry_type=ogr.wkbLineString)
        elif self.is_polygon_data:
            # Create polygon geometry
            geom = ogr.Geometry(ogr.wkbPolygon)

            # Add outer ring
            linear_ring = array_to_line(
                geometry[i].outer_ring, geometry_type=ogr.wkbLinearRing)
            geom.AddGeometry(linear_ring)

            # Add inner rings if any
            for A in geometry[i].inner_rings:
                geom.AddGeometry(array_to_line(
                    A, geometry_type=ogr.wkbLinearRing))
        else:
            msg = 'Geometry type %s not implemented' % self.geometry_type
            raise WriteLayerError(msg)

        feature.SetGeometry(geom)

        G = feature.GetGeometryRef()
        if G is None:
            msg = 'Could not create GeometryRef for file %s' % filename
            raise WriteLayerError(msg)

        # Store attributes
        if store_attributes:
            for j, name in enumerate(fields):
                actual_field_name = layer_def.GetFieldDefn(j).GetNameRef()

                val = data[i][name]
                if isinstance(val, numpy.ndarray):
                    # A singleton of type <type 'numpy.ndarray'> works
                    # for gdal version 1.6 but fails for version 1.8
                    # in SetField with error: NotImplementedError:
                    # Wrong number of arguments for overloaded function
                    val = float(val)
                elif val is None:
                    val = ''

                # We do this because there is a NaN problem on Windows.
                # NaN values must be converted to _pseudo_inf to solve the
                # problem. But when InaSAFE reads the file, they will be
                # converted back to NaN values, so that NaN in InaSAFE is a
                # numpy.nan
                # please check https://github.com/AIFDR/inasafe/issues/269
                # for more information
                if val != val:
                    val = _pseudo_inf

                feature.SetField(actual_field_name, val)

        # Save this feature
        if lyr.CreateFeature(feature) != 0:
            msg = 'Failed to create feature %i in file %s' % (i, filename)
            raise WriteLayerError(msg)

        feature.Destroy()

    # Write keywords if any
    write_keywords(self.keywords, base_name + '.keywords')
def __init__(
        self,
        data=None,
        projection=None,
        geometry=None,
        geometry_type=None,
        name=None,
        keywords=None,
        style_info=None,
        sublayer=None):
    """Initialise object with either geometry or filename

    NOTE: Doc strings in constructor are not harvested and exposed in
    online documentation. Hence the details are specified in the class
    docstring.
    """

    # Invoke common layer constructor
    Layer.__init__(
        self,
        name=name,
        projection=projection,
        keywords=keywords,
        style_info=style_info,
        sublayer=sublayer)

    # Input checks
    if data is None and geometry is None:
        # Instantiate empty object
        self.geometry_type = None
        self.extent = [0, 0, 0, 0]
        return

    if isinstance(data, basestring):
        self.read_from_file(data)
    # check QGIS_IS_AVAILABLE to avoid QgsVectorLayer undefined error
    elif QGIS_IS_AVAILABLE and isinstance(data, QgsVectorLayer):
        self.read_from_qgis_native(data)
    else:
        # Assume that data is provided as sequences provided as
        # arguments to the Vector constructor
        # with extra keyword arguments supplying metadata

        msg = 'Geometry must be specified'
        verify(geometry is not None, msg)

        msg = 'Geometry must be a sequence'
        verify(is_sequence(geometry), msg)

        if len(geometry) > 0 and isinstance(geometry[0], Polygon):
            self.geometry_type = ogr.wkbPolygon
            self.geometry = geometry
        else:
            self.geometry_type = get_geometry_type(geometry, geometry_type)

            if self.is_polygon_data:
                # Convert to objects if input is a list of simple arrays
                self.geometry = [Polygon(outer_ring=x) for x in geometry]
            else:
                # Convert to list if input is an array
                if isinstance(geometry, numpy.ndarray):
                    self.geometry = geometry.tolist()
                else:
                    self.geometry = geometry

        if data is None:
            # Generate default attribute as OGR will do that anyway
            # when writing
            data = []
            for i in range(len(geometry)):
                data.append({'ID': i})

        # Check data
        self.data = data
        if data is not None:
            msg = 'Data must be a sequence'
            verify(is_sequence(data), msg)

            msg = ('The number of entries in geometry (%s) and data (%s) '
                   'must be the same' % (len(geometry), len(data)))
            verify(len(geometry) == len(data), msg)

        # Establish extent
        if len(geometry) == 0:
            # Degenerate layer
            self.extent = [0, 0, 0, 0]
            return

        # Compute bounding box for each geometry type
        minx = miny = sys.maxint
        maxx = maxy = -minx

        if self.is_point_data:
            A = numpy.array(self.get_geometry())
            minx = min(A[:, 0])
            maxx = max(A[:, 0])
            miny = min(A[:, 1])
            maxy = max(A[:, 1])
        elif self.is_line_data:
            for g in self.get_geometry():
                A = numpy.array(g)
                minx = min(minx, min(A[:, 0]))
                maxx = max(maxx, max(A[:, 0]))
                miny = min(miny, min(A[:, 1]))
                maxy = max(maxy, max(A[:, 1]))
        elif self.is_polygon_data:
            # Do outer ring only
            for g in self.get_geometry(as_geometry_objects=False):
                A = numpy.array(g)
                minx = min(minx, min(A[:, 0]))
                maxx = max(maxx, max(A[:, 0]))
                miny = min(miny, min(A[:, 1]))
                maxy = max(maxy, max(A[:, 1]))

        self.extent = [minx, maxx, miny, maxy]
def get_data(self, nan=True, scaling=None):
    """Get raster data as numeric array

    Input
        nan: Optional flag controlling handling of missing values.
             If nan is True (default), nodata values will be replaced
             with numpy.nan
             If keyword nan has a numeric value, nodata values will
             be replaced by that value. E.g. to set missing values to 0,
             do get_data(nan=0.0)
        scaling: Optional flag controlling if data is to be scaled
                 if it has been resampled. Admissible values are
                 False: data is retrieved without modification.
                 True: Data is rescaled based on the squared ratio between
                       its current and native resolution. This is typically
                       required if raster data represents a density
                       such as population per km^2
                 None: The behaviour will depend on the keyword "datatype"
                       associated with the layer. If it is "density",
                       scaling will be applied, otherwise not. This is the
                       default.
                 scalar value: If scaling takes a numerical scalar value,
                               that will be used to scale the data
    """

    if hasattr(self, 'data'):
        A = self.data
        verify(A.shape[0] == self.rows and A.shape[1] == self.columns)
    else:
        # Read from raster file
        A = self.band.ReadAsArray()

        # Convert to double precision (issue #75)
        A = numpy.array(A, dtype=numpy.float64)

        # Self check
        M, N = A.shape
        msg = ('Dimensions of raster array do not match those of '
               'raster file %s' % self.filename)
        verify(M == self.rows, msg)
        verify(N == self.columns, msg)

    # Handle no data value
    if nan is False:
        pass
    else:
        if nan is True:
            NAN = numpy.nan
        else:
            NAN = nan

        # Replace NODATA_VALUE with NaN
        nodata = self.get_nodata_value()

        NaN = numpy.ones(A.shape, A.dtype) * NAN
        A = numpy.where(A == nodata, NaN, A)

    # Take care of possible scaling
    if scaling is None:
        # Redefine scaling from density keyword if possible
        kw = self.get_keywords()
        if 'datatype' in kw and kw['datatype'].lower() == 'density':
            scaling = True
        else:
            scaling = False

    if scaling is False:
        # No change
        sigma = 1
    elif scaling is True:
        # Calculate scaling based on resolution change
        actual_res = self.get_resolution(isotropic=True)
        native_res = self.get_resolution(isotropic=True, native=True)
        sigma = (actual_res / native_res) ** 2
    else:
        # See if scaling can work as a scalar value
        try:
            sigma = float(scaling)
        except Exception, e:
            msg = ('Keyword scaling "%s" could not be converted to a '
                   'number. It must be either True, False, None or a '
                   'number: %s' % (scaling, str(e)))
            raise Exception(msg)

    # Return data scaled as requested
    return sigma * A
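# Worked example of the density scaling factor used above: if a raster with
# native resolution 0.00083 degrees has been resampled to 0.00166 degrees,
# each cell covers roughly four times the area, so density values are scaled
# by sigma = (actual_res / native_res) ** 2, which is approximately 4.
def _example_density_scaling(actual_res, native_res, value):
    sigma = (actual_res / native_res) ** 2
    return sigma * value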
def __init__(self, data=None, projection=None, geometry=None,
             geometry_type=None, name='', keywords=None, style_info=None):
    """Initialise object with either geometry or filename

    Input
        data: Can be either
            * a filename of a vector file format known to GDAL
            * List of dictionaries of fields associated with
              point coordinates
            * None
        projection: Geospatial reference in WKT format.
                    Only used if geometry is provided as a numeric array,
        geometry: A list of either point coordinates or polygons/lines
                  (see note below)
        geometry_type: Desired interpretation of geometry.
                       Valid options are 'point', 'line', 'polygon' or
                       the ogr types: 1, 2, 3
                       If None, a geometry_type will be inferred
        name: Optional name for layer.
              Only used if geometry is provided as a numeric array
        keywords: Optional dictionary with keywords that describe the
                  layer. When the layer is stored, these keywords will
                  be written into an associated file with extension
                  .keywords.

                  Keywords can for example be used to display text
                  about the layer in a web application.

    Notes
        If data is a filename, all other arguments are ignored
        as they will be inferred from the file.

        The geometry type will be inferred from the dimensions of geometry.
        If each entry is one set of coordinates the type will be
        ogr.wkbPoint, if it is an array of coordinates the type will be
        ogr.wkbPolygon.

        Each polygon or line feature takes the form of an Nx2 array
        representing vertices where line segments are joined
    """

    if data is None and projection is None and geometry is None:
        # Instantiate empty object
        self.name = name
        self.projection = None
        self.geometry = None
        self.geometry_type = None
        self.filename = None
        self.data = None
        self.extent = None
        self.keywords = {}
        self.style_info = {}
        return

    if isinstance(data, basestring):
        self.read_from_file(data)
    else:
        # Assume that data is provided as sequences provided as
        # arguments to the Vector constructor
        # with extra keyword arguments supplying metadata

        self.name = name
        self.filename = None

        if keywords is None:
            self.keywords = {}
        else:
            msg = ('Specified keywords must be either None or a '
                   'dictionary. I got %s' % keywords)
            verify(isinstance(keywords, dict), msg)
            self.keywords = keywords

        if style_info is None:
            self.style_info = {}
        else:
            msg = ('Specified style_info must be either None or a '
                   'dictionary. I got %s' % style_info)
            verify(isinstance(style_info, dict), msg)
            self.style_info = style_info

        msg = 'Geometry must be specified'
        verify(geometry is not None, msg)

        msg = 'Geometry must be a sequence'
        verify(is_sequence(geometry), msg)
        self.geometry = geometry

        self.geometry_type = get_geometry_type(geometry, geometry_type)

        #msg = 'Projection must be specified'
        #verify(projection is not None, msg)
        self.projection = Projection(projection)

        if data is None:
            # Generate default attribute as OGR will do that anyway
            # when writing
            data = []
            for i in range(len(geometry)):
                data.append({'ID': i})

        # Check data
        self.data = data
        if data is not None:
            msg = 'Data must be a sequence'
            verify(is_sequence(data), msg)

            msg = ('The number of entries in geometry and data '
                   'must be the same')
            verify(len(geometry) == len(data), msg)
def write_to_file(self, filename):
    """Save vector data to file

    Input
        filename: filename with extension .shp or .gml

    Note, if attribute names are longer than 10 characters they will be
    truncated. This is due to limitations in the shp file driver and has
    to be done here since gdal v1.7 onwards has changed its handling of
    this issue: http://www.gdal.org/ogr/drv_shapefile.html
    """

    # Check file format
    basename, extension = os.path.splitext(filename)

    msg = ('Invalid file type for file %s. Only extensions '
           'shp or gml allowed.' % filename)
    verify(extension == '.shp' or extension == '.gml', msg)
    driver = DRIVER_MAP[extension]

    # FIXME (Ole): Temporary flagging of GML issue (ticket #18)
    if extension == '.gml':
        msg = ('OGR GML driver does not store geospatial reference. '
               'This format is disabled for the time being. See '
               'https://github.com/AIFDR/riab/issues/18')
        raise Exception(msg)

    # Derive layername from filename (excluding preceding dirs)
    layername = os.path.split(basename)[-1]

    # Get vector data
    geometry = self.get_geometry()
    data = self.get_data()
    N = len(geometry)

    # Clear any previous file of this name (ogr does not overwrite)
    try:
        os.remove(filename)
    except:
        pass

    # Create new file with one layer
    drv = ogr.GetDriverByName(driver)
    if drv is None:
        msg = 'OGR driver %s not available' % driver
        raise Exception(msg)

    ds = drv.CreateDataSource(filename)
    if ds is None:
        msg = 'Creation of output file %s failed' % filename
        raise Exception(msg)

    lyr = ds.CreateLayer(layername,
                         self.projection.spatial_reference,
                         self.geometry_type)
    if lyr is None:
        msg = 'Could not create layer %s' % layername
        raise Exception(msg)

    # Define attributes if any
    store_attributes = False
    if data is not None:
        if len(data) > 0:
            try:
                fields = data[0].keys()
            except:
                msg = ('Input parameter "attributes" was specified '
                       'but it does not contain dictionaries with '
                       'field information as expected. The first '
                       'element is %s' % data[0])
                raise Exception(msg)
            else:
                # Establish OGR types for each element
                ogrtypes = {}
                for name in fields:
                    att = data[0][name]
                    py_type = type(att)
                    msg = ('Unknown type for storing vector '
                           'data: %s, %s' % (name, str(py_type)[1:-1]))
                    verify(py_type in TYPE_MAP, msg)
                    ogrtypes[name] = TYPE_MAP[py_type]
        else:
            msg = ('Input parameter "data" was specified '
                   'but appears to be empty')
            raise Exception(msg)

        # Create attribute fields in layer
        store_attributes = True
        for name in fields:
            fd = ogr.FieldDefn(name, ogrtypes[name])
            # FIXME (Ole): Trying to address issue #16
            #              But it doesn't work and
            #              somehow changes the values of MMI in test
            #width = max(128, len(name))
            #print name, width
            #fd.SetWidth(width)

            # Silent handling of warnings like
            # Warning 6: Normalized/laundered field name:
            # 'CONTENTS_LOSS_AUD' to 'CONTENTS_L'
            gdal.PushErrorHandler('CPLQuietErrorHandler')
            if lyr.CreateField(fd) != 0:
                msg = 'Could not create field %s' % name
                raise Exception(msg)

            # Restore error handler
            gdal.PopErrorHandler()

    # Store geometry
    geom = ogr.Geometry(self.geometry_type)
    layer_def = lyr.GetLayerDefn()
    for i in range(N):
        # Create new feature instance
        feature = ogr.Feature(layer_def)

        # Store geometry and check
        if self.geometry_type == ogr.wkbPoint:
            x = float(geometry[i][0])
            y = float(geometry[i][1])
            geom.SetPoint_2D(0, x, y)
        elif self.geometry_type == ogr.wkbPolygon:
            wkt = array2wkt(geometry[i], geom_type='POLYGON')
            geom = ogr.CreateGeometryFromWkt(wkt)
        elif self.geometry_type == ogr.wkbLineString:
            wkt = array2wkt(geometry[i], geom_type='LINESTRING')
            geom = ogr.CreateGeometryFromWkt(wkt)
        else:
            msg = 'Geometry type %s not implemented' % self.geometry_type
            raise Exception(msg)

        feature.SetGeometry(geom)

        G = feature.GetGeometryRef()
        if G is None:
            msg = 'Could not create GeometryRef for file %s' % filename
            raise Exception(msg)

        # Store attributes
        if store_attributes:
            for j, name in enumerate(fields):
                actual_field_name = layer_def.GetFieldDefn(j).GetNameRef()

                val = data[i][name]
                if type(val) == numpy.ndarray:
                    # A singleton of type <type 'numpy.ndarray'> works
                    # for gdal version 1.6 but fails for version 1.8
                    # in SetField with error: NotImplementedError:
                    # Wrong number of arguments for overloaded function
                    val = float(val)
                elif val is None:
                    val = ''

                feature.SetField(actual_field_name, val)

        # Save this feature
        if lyr.CreateFeature(feature) != 0:
            msg = 'Failed to create feature %i in file %s' % (i, filename)
            raise Exception(msg)

        feature.Destroy()

    # Write keywords if any
    write_keywords(self.keywords, basename + '.keywords')
def interpolate(self, X, name=None, attribute=None):
    """Interpolate values of this vector layer to other layer

    Input
        X: Layer object defining target
        name: Optional name of interpolated layer
        attribute: Optional attribute name to use.
                   If None, all attributes are used.
                   FIXME (Ole): Single attribute not tested well yet
                   and not implemented for lines

    Output
        Y: Layer object with values of this vector layer interpolated to
           geometry of input layer X
    """

    msg = 'Input to Vector.interpolate must be a vector layer instance'
    verify(X.is_vector, msg)

    X_projection = X.get_projection()
    S_projection = self.get_projection()

    msg = ('Projections must be the same: I got %s and %s'
           % (S_projection, X_projection))
    verify(S_projection == X_projection, msg)

    msg = ('Vector layer to interpolate from must be polygon geometry. '
           'I got OGR geometry type %s'
           % geometrytype2string(self.geometry_type))
    verify(self.is_polygon_data, msg)

    # FIXME (Ole): Maybe organise this the same way it is done with rasters
    # FIXME (Ole): Retain original geometry to use with returned data here
    if X.is_polygon_data:
        # Use centroids, in case of polygons
        X = convert_polygons_to_centroids(X)
    elif X.is_line_data:
        # Clip lines to polygon and return tagged segments

        # FIXME (Ole): Need to separate this out, but identify what is
        #              common with points and lines
        #
        #X.write_to_file('line_data.shp')
        #self.write_to_file('poly_data.shp')

        # Extract line features
        lines = X.get_geometry()
        line_attributes = X.get_data()
        N = len(X)
        verify(len(lines) == N)
        verify(len(line_attributes) == N)

        # Extract polygon features
        polygons = self.get_geometry()
        poly_attributes = self.get_data()
        verify(len(polygons) == len(poly_attributes))

        # Data structure for resulting line segments
        clipped_geometry = []
        clipped_attributes = []

        # Clip lines to polygons
        for i, polygon in enumerate(polygons):
            for j, line in enumerate(lines):
                inside, outside = clip_line_by_polygon(line, polygon)

                # Create new attributes
                # FIXME (Ole): Not done for single specified polygon
                #              attribute
                inside_attributes = {}
                outside_attributes = {}
                for key in line_attributes[j]:
                    inside_attributes[key] = line_attributes[j][key]
                    outside_attributes[key] = line_attributes[j][key]

                for key in poly_attributes[i]:
                    inside_attributes[key] = poly_attributes[i][key]
                    outside_attributes[key] = None

                # Always create default attribute flagging if segment was
                # inside any of the polygons
                inside_attributes[DEFAULT_ATTRIBUTE] = True
                outside_attributes[DEFAULT_ATTRIBUTE] = False

                # Assign new attribute set to clipped lines
                for segment in inside:
                    clipped_geometry.append(segment)
                    clipped_attributes.append(inside_attributes)

                for segment in outside:
                    clipped_geometry.append(segment)
                    clipped_attributes.append(outside_attributes)

        # Create new Vector instance and return
        V = Vector(data=clipped_attributes,
                   projection=X.get_projection(),
                   geometry=clipped_geometry,
                   geometry_type='line')
        #V.write_to_file('clipped_and_tagged.shp')
        return V

    # The following applies only to Polygon-Point interpolation
    msg = ('Vector layer to interpolate to must be point geometry. '
           'I got OGR geometry type %s'
           % geometrytype2string(X.geometry_type))
    verify(X.is_point_data, msg)

    msg = ('Name must be either a string or None. I got %s'
           % (str(type(name)))[1:-1])
    verify(name is None or isinstance(name, basestring), msg)

    msg = ('Attribute must be either a string or None. I got %s'
           % (str(type(attribute)))[1:-1])
    verify(attribute is None or isinstance(attribute, basestring), msg)

    attribute_names = self.get_attribute_names()
    if attribute is not None:
        msg = ('Requested attribute "%s" did not exist in %s'
               % (attribute, attribute_names))
        verify(attribute in attribute_names, msg)

    #----------------
    # Start algorithm
    #----------------

    # Extract point features
    points = ensure_numeric(X.get_geometry())
    attributes = X.get_data()
    N = len(X)

    # Extract polygon features
    geom = self.get_geometry()
    data = self.get_data()
    verify(len(geom) == len(data))

    # Augment point features with empty attributes from polygon
    for a in attributes:
        if attribute is None:
            # Use all attributes
            for key in attribute_names:
                a[key] = None
        else:
            # Use only requested attribute
            # FIXME (Ole): Test for this is not finished
            a[attribute] = None

        # Always create default attribute flagging if point was
        # inside any of the polygons
        a[DEFAULT_ATTRIBUTE] = None

    # Traverse polygons and assign attributes to points that fall inside
    for i, polygon in enumerate(geom):
        if attribute is None:
            # Use all attributes
            poly_attr = data[i]
        else:
            # Use only requested attribute
            poly_attr = {attribute: data[i][attribute]}

        # Assign default attribute to indicate points inside
        poly_attr[DEFAULT_ATTRIBUTE] = True

        # Clip data points by polygons and add polygon attributes
        indices = inside_polygon(points, polygon)
        for k in indices:
            for key in poly_attr:
                # Assign attributes from polygon to points
                attributes[k][key] = poly_attr[key]

    # Create new Vector instance and return
    V = Vector(data=attributes,
               projection=X.get_projection(),
               geometry=X.get_geometry())
    return V
def get_data(self, attribute=None, index=None, copy=False):
    """Get vector attributes.

    :param attribute: Specify an attribute name of which to return data.
    :type attribute: str

    :param index: Indicates a specific value on which to call the
        attribute. Ignored if no attribute is set.
    :type index: int

    :param copy: Indicate whether to return a pointer to the data, or a
        copy of it.
    :type copy: bool

    :raises: GetDataError

    :returns: A list where each entry is a dictionary of attributes for
        one feature.
    :rtype: list

    Note:
        Data is returned as a list where each entry is a dictionary of
        attributes for one feature. Entries in get_geometry() and
        get_data() are related as 1-to-1.

        If optional argument attribute is specified and a valid name,
        then the list of values for that attribute is returned.

        If optional argument index is also specified, only the value at
        that index will be returned. Any value of index is ignored if
        attribute is None.

        If optional argument copy is True and all attributes are
        requested, a copy will be returned. Otherwise a pointer to the
        data is returned.
    """

    if hasattr(self, 'data'):
        if attribute is None:
            if copy:
                return copy_module.deepcopy(self.data)
            else:
                return self.data
        else:
            msg = ('Specified attribute %s does not exist in '
                   'vector layer %s. Valid names are %s'
                   '' % (attribute, self, self.data[0].keys()))
            verify(attribute in self.data[0], msg)

            if index is None:
                # Return all values for specified attribute
                return [x[attribute] for x in self.data]
            else:
                # Return value for specified attribute and index
                msg = ('Specified index must be either None or '
                       'an integer. I got %s' % index)
                verify(isinstance(index, int), msg)

                msg = ('Specified index must lie within the bounds '
                       'of vector layer %s which is [%i, %i]'
                       '' % (self, 0, len(self) - 1))
                verify(0 <= index < len(self), msg)

                return self.data[index][attribute]
    else:
        msg = 'Vector data instance does not have any attributes'
        raise GetDataError(msg)
def write_to_file(self, filename, sublayer=None):
    """Save vector data to file

    :param filename: filename with extension .shp or .gml
    :type filename: str

    :param sublayer: Optional parameter for writing a sublayer. Ignored
        unless we are writing to an sqlite file.
    :type sublayer: str

    :raises: WriteLayerError

    Note:
        Shp limitation, if attribute names are longer than 10
        characters they will be truncated. This is due to limitations in
        the shp file driver and has to be done here since gdal v1.7 onwards
        has changed its handling of this issue:
        http://www.gdal.org/ogr/drv_shapefile.html

        **For this reason we recommend writing to spatialite.**
    """

    # Check file format
    base_name, extension = os.path.splitext(filename)

    msg = ('Invalid file type for file %s. Only extensions '
           'sqlite, shp or gml allowed.' % filename)
    verify(extension in ['.sqlite', '.shp', '.gml'], msg)
    driver = DRIVER_MAP[extension]

    # FIXME (Ole): Temporary flagging of GML issue (ticket #18)
    if extension == '.gml':
        msg = ('OGR GML driver does not store geospatial reference. '
               'This format is disabled for the time being. See '
               'https://github.com/AIFDR/riab/issues/18')
        raise WriteLayerError(msg)

    # Derive layer_name from filename (excluding preceding dirs)
    if sublayer is None or extension == '.shp':
        layer_name = os.path.split(base_name)[-1]
    else:
        layer_name = sublayer

    # Get vector data
    if self.is_polygon_data:
        geometry = self.get_geometry(as_geometry_objects=True)
    else:
        geometry = self.get_geometry()
    data = self.get_data()
    N = len(geometry)

    # Clear any previous file of this name (ogr does not overwrite)
    try:
        os.remove(filename)
    except OSError:
        pass

    # Create new file with one layer
    drv = ogr.GetDriverByName(driver)
    if drv is None:
        msg = 'OGR driver %s not available' % driver
        raise WriteLayerError(msg)

    ds = drv.CreateDataSource(get_string(filename))
    if ds is None:
        msg = 'Creation of output file %s failed' % filename
        raise WriteLayerError(msg)

    lyr = ds.CreateLayer(get_string(layer_name),
                         self.projection.spatial_reference,
                         self.geometry_type)
    if lyr is None:
        msg = 'Could not create layer %s' % layer_name
        raise WriteLayerError(msg)

    # Define attributes if any
    store_attributes = False
    fields = []
    if data is not None:
        if len(data) > 0:
            try:
                fields = data[0].keys()
            except:
                msg = ('Input parameter "attributes" was specified '
                       'but it does not contain a list of dictionaries '
                       'with field information as expected. The first '
                       'element is %s' % data[0])
                raise WriteLayerError(msg)
            else:
                # Establish OGR types for each element
                ogr_types = {}
                for name in fields:
                    att = data[0][name]
                    py_type = type(att)
                    msg = ('Unknown type for storing vector '
                           'data: %s, %s' % (name, str(py_type)[1:-1]))
                    verify(py_type in TYPE_MAP, msg)
                    ogr_types[name] = TYPE_MAP[py_type]
        else:
            # msg = ('Input parameter "data" was specified '
            #        'but appears to be empty')
            # raise InaSAFEError(msg)
            pass

        # Create attribute fields in layer
        store_attributes = True
        for name in fields:
            # Rizky: OGR can't handle unicode field name, thus we
            # convert it to ASCII
            fd = ogr.FieldDefn(str(name), ogr_types[name])
            # FIXME (Ole): Trying to address issue #16
            #              But it doesn't work and
            #              somehow changes the values of MMI in test
            # width = max(128, len(name))
            # print name, width
            # fd.SetWidth(width)

            # Silent handling of warnings like
            # Warning 6: Normalized/laundered field name:
            # 'CONTENTS_LOSS_AUD' to 'CONTENTS_L'
            gdal.PushErrorHandler('CPLQuietErrorHandler')
            if lyr.CreateField(fd) != 0:
                msg = 'Could not create field %s' % name
                raise WriteLayerError(msg)

            # Restore error handler
            gdal.PopErrorHandler()

    # Store geometry
    geom = ogr.Geometry(self.geometry_type)
    layer_def = lyr.GetLayerDefn()
    for i in range(N):
        # Create new feature instance
        feature = ogr.Feature(layer_def)

        # Store geometry and check
        if self.is_point_data:
            x = float(geometry[i][0])
            y = float(geometry[i][1])
            geom.SetPoint_2D(0, x, y)
        elif self.is_line_data:
            geom = array_to_line(geometry[i],
                                 geometry_type=ogr.wkbLineString)
        elif self.is_polygon_data:
            # Create polygon geometry
            geom = ogr.Geometry(ogr.wkbPolygon)

            # Add outer ring
            linear_ring = array_to_line(geometry[i].outer_ring,
                                        geometry_type=ogr.wkbLinearRing)
            geom.AddGeometry(linear_ring)

            # Add inner rings if any
            for A in geometry[i].inner_rings:
                geom.AddGeometry(
                    array_to_line(A, geometry_type=ogr.wkbLinearRing))
        else:
            msg = 'Geometry type %s not implemented' % self.geometry_type
            raise WriteLayerError(msg)

        feature.SetGeometry(geom)

        G = feature.GetGeometryRef()
        if G is None:
            msg = 'Could not create GeometryRef for file %s' % filename
            raise WriteLayerError(msg)

        # Store attributes
        if store_attributes:
            for j, name in enumerate(fields):
                actual_field_name = layer_def.GetFieldDefn(j).GetNameRef()

                val = data[i][name]
                if isinstance(val, numpy.ndarray):
                    # A singleton of type <type 'numpy.ndarray'> works
                    # for gdal version 1.6 but fails for version 1.8
                    # in SetField with error: NotImplementedError:
                    # Wrong number of arguments for overloaded function
                    val = float(val)
                elif val is None:
                    val = ''

                # We do this because there is a NaN problem on Windows.
                # NaN values must be converted to _pseudo_inf to solve the
                # problem. But when InaSAFE reads the file, they will be
                # converted back to NaN values, so that NaN in InaSAFE is a
                # numpy.nan
                # please check https://github.com/AIFDR/inasafe/issues/269
                # for more information
                if val != val:
                    val = _pseudo_inf

                feature.SetField(actual_field_name, val)

        # Save this feature
        if lyr.CreateFeature(feature) != 0:
            msg = 'Failed to create feature %i in file %s' % (i, filename)
            raise WriteLayerError(msg)

        feature.Destroy()

    # Write keywords if any
    # write_keywords(self.keywords, base_name + '.keywords')
    write_iso19115_metadata(filename, self.keywords)
    self.keywords = read_iso19115_metadata(filename)
def check_bbox_string(bbox_string):
    """Check that bbox string is valid
    """

    msg = 'Expected bbox as a string with format "W,S,E,N"'
    verify(isinstance(bbox_string, basestring), msg)

    # Use checks from string to list conversion
    # FIXME (Ole): Would be better to separate the checks from the
    #              conversion and use those checks directly.
    minx, miny, maxx, maxy = bboxstring2list(bbox_string)

    # Check semantic integrity
    msg = ('Western border %.5f of bounding box %s was out of range '
           'for longitudes ([-180:180])' % (minx, bbox_string))
    verify(-180 <= minx <= 180, msg)

    msg = ('Eastern border %.5f of bounding box %s was out of range '
           'for longitudes ([-180:180])' % (maxx, bbox_string))
    verify(-180 <= maxx <= 180, msg)

    msg = ('Southern border %.5f of bounding box %s was out of range '
           'for latitudes ([-90:90])' % (miny, bbox_string))
    verify(-90 <= miny <= 90, msg)

    msg = ('Northern border %.5f of bounding box %s was out of range '
           'for latitudes ([-90:90])' % (maxy, bbox_string))
    verify(-90 <= maxy <= 90, msg)

    msg = ('Western border %.5f was greater than or equal to eastern '
           'border %.5f of bounding box %s' % (minx, maxx, bbox_string))
    verify(minx < maxx, msg)

    msg = ('Southern border %.5f was greater than or equal to northern '
           'border %.5f of bounding box %s' % (miny, maxy, bbox_string))
    verify(miny < maxy, msg)
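# Minimal usage sketch for check_bbox_string: a well formed string passes
# silently, while a string with the western and eastern borders swapped is
# expected to fail one of the verify() calls above.
def _example_check_bbox_string():
    check_bbox_string('105.592,-7.809,110.159,-5.647')
    try:
        check_bbox_string('110.159,-7.809,105.592,-5.647')
    except Exception:
        pass  # verify() raised as expected for W >= E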
def __init__(self, data=None, projection=None, geotransform=None,
             name='', keywords=None, style_info=None):
    """Initialise object with either data or filename

    Input
        data: Can be either
            * a filename of a raster file format known to GDAL
            * an MxN array of raster data
            * None (FIXME (Ole): Remove this option)
        projection: Geospatial reference in WKT format.
                    Only used if data is provided as a numeric array,
        geotransform: GDAL geotransform (6-tuple).
                      (top left x, w-e pixel resolution, rotation,
                       top left y, rotation, n-s pixel resolution).
                      See e.g. http://www.gdal.org/gdal_tutorial.html
                      Only used if data is provided as a numeric array,
        name: Optional name for layer.
              Only used if data is provided as a numeric array,
        keywords: Optional dictionary with keywords that describe the
                  layer. When the layer is stored, these keywords will
                  be written into an associated file with extension
                  .keywords.

                  Keywords can for example be used to display text
                  about the layer in a web application.

    Note that if data is a filename, all other arguments are ignored
    as they will be inferred from the file.
    """

    # Input checks
    if data is None:
        # Instantiate empty object
        self.name = name
        self.data = None
        self.projection = None
        self.coordinates = None
        self.filename = None
        self.keywords = {}
        return

    # Initialisation
    if isinstance(data, basestring):
        self.read_from_file(data)
    else:
        # Assume that data is provided as an array
        # with extra keyword arguments supplying metadata
        if keywords is None:
            self.keywords = {}
        else:
            msg = ('Specified keywords must be either None or a '
                   'dictionary. I got %s' % keywords)
            verify(isinstance(keywords, dict), msg)
            self.keywords = keywords

        if style_info is None:
            self.style_info = {}
        else:
            msg = ('Specified style_info must be either None or a '
                   'dictionary. I got %s' % style_info)
            verify(isinstance(style_info, dict), msg)
            self.style_info = style_info

        self.data = numpy.array(data, dtype='d', copy=False)

        self.filename = None
        self.name = name

        self.projection = Projection(projection)
        self.geotransform = geotransform

        self.rows = self.data.shape[0]
        self.columns = self.data.shape[1]

        self.number_of_bands = 1
        raise Exception(msg)

    if native:
        keywords = self.get_keywords()
        if 'resolution' in keywords:
            resolution = keywords['resolution']
            try:
                res = float(resolution)
            except:
                # Assume resolution is a string of the form:
                # (0.00045228819716044, 0.00045228819716044)
                msg = ('Unknown format for resolution keyword: %s'
                       % resolution)
                verify((resolution.startswith('(') and
                        resolution.endswith(')')), msg)

                dx, dy = [float(s) for s in resolution[1:-1].split(',')]
                if not isotropic:
                    res = (dx, dy)
                else:
                    msg = ('Resolution of layer "%s" was not isotropic: '
                           '[dx, dy] == %s' % (self.get_name(), res))
                    verify(numpy.allclose(dx, dy, rtol=1.0e-12,
                                          atol=1.0e-12), msg)
                    res = dx
            else:
                if not isotropic:
                    res = (res, res)

    # Return either 2-tuple or scalar depending on isotropic
    return res
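# Standalone sketch of the resolution-keyword parsing above: a float-like
# string is treated as an isotropic resolution, while a "(dx, dy)" string is
# split into its two components.
def _example_parse_resolution(resolution):
    try:
        return float(resolution)
    except ValueError:
        dx, dy = [float(s) for s in resolution[1:-1].split(',')]
        return dx, dy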