def test_riab_interpolation(self):
    """Interpolation using Raster and Vector objects
    """

    # Create test data
    lon_ul = 100  # Longitude of upper left corner
    lat_ul = 10   # Latitude of upper left corner
    numlon = 8    # Number of longitudes
    numlat = 5    # Number of latitudes
    dlon = 1
    dlat = -1

    # Define array where latitudes are rows and longitudes are columns
    A = numpy.zeros((numlat, numlon))

    # Establish coordinates for lower left corner
    lat_ll = lat_ul - numlat
    lon_ll = lon_ul

    # Define pixel centers along each direction
    longitudes = numpy.linspace(lon_ll + 0.5,
                                lon_ll + numlon - 0.5,
                                numlon)
    latitudes = numpy.linspace(lat_ll + 0.5,
                               lat_ll + numlat - 0.5,
                               numlat)

    # Define raster with latitudes going bottom-up (south to north).
    # Longitudes go left-right (west to east)
    for i in range(numlat):
        for j in range(numlon):
            A[numlat - 1 - i, j] = linear_function(longitudes[j],
                                                   latitudes[i])

    # Create bilinear interpolation function
    F = raster_spline(longitudes, latitudes, A)

    # Write array to a raster file
    geotransform = (lon_ul, dlon, 0, lat_ul, 0, dlat)
    projection = ('GEOGCS["GCS_WGS_1984",'
                  'DATUM["WGS_1984",'
                  'SPHEROID["WGS_1984",6378137.0,298.257223563]],'
                  'PRIMEM["Greenwich",0.0],'
                  'UNIT["Degree",0.0174532925199433]]')

    raster_filename = unique_filename(suffix='.tif')
    write_raster_data(A, projection, geotransform, raster_filename)

    # Write test interpolation points to a vector file
    coordinates = []
    for xi in longitudes:
        for eta in latitudes:
            coordinates.append((xi, eta))

    vector_filename = unique_filename(suffix='.shp')
    write_point_data(data=None,
                     projection=projection,
                     geometry=coordinates,
                     filename=vector_filename)

    # Read both datasets back in
    R = read_layer(raster_filename)
    V = read_layer(vector_filename)

    # Then test that axes and data returned by R are correct
    x, y = R.get_geometry()
    msg = 'X axis was %s, should have been %s' % (x, longitudes)
    assert numpy.allclose(longitudes, x), msg
    msg = 'Y axis was %s, should have been %s' % (y, latitudes)
    assert numpy.allclose(latitudes, y), msg

    AA = R.get_data()
    msg = 'Raster data was %s, should have been %s' % (AA, A)
    assert numpy.allclose(AA, A), msg

    # Test riab's interpolation function
    I = R.interpolate(V, name='value')
    Icoordinates = I.get_geometry()
    Iattributes = I.get_data()
    assert numpy.allclose(Icoordinates, coordinates)

    # Test that interpolated points are correct
    for i, (xi, eta) in enumerate(Icoordinates):
        z = Iattributes[i]['value']
        #print xi, eta, z, linear_function(xi, eta)
        assert numpy.allclose(z, linear_function(xi, eta),
                              rtol=1e-12)

    # FIXME (Ole): Need test for values outside grid.
    #              They should be NaN or something

    # Cleanup
    # FIXME (Ole): Shape files are a collection of files. How to remove?
    os.remove(vector_filename)
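
# One possible answer to the FIXME above: a shapefile is a collection of
# companion files (.shp, .shx, .dbf, .prj, ...), so removing only the .shp
# leaves the rest behind. The helper below is a minimal sketch of one way to
# clean up, assuming the companions share the shapefile's basename; the name
# remove_shapefile and the glob pattern are illustrative, not part of the
# existing API. It could replace os.remove(vector_filename) in the test above,
# e.g. remove_shapefile(vector_filename).
def remove_shapefile(shp_filename):
    """Remove a shapefile together with its companion files (sketch)."""
    import glob
    import os

    # Match every file sharing the shapefile's basename, e.g. foo.shp,
    # foo.shx, foo.dbf, foo.prj, and delete them all
    basename, _ = os.path.splitext(shp_filename)
    for companion in glob.glob(basename + '.*'):
        os.remove(companion)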
def test_reading_and_writing_of_vector_data(self):
    """Vector data can be read and written correctly
    """

    # First test that some error conditions are caught
    filename = unique_filename(suffix='nshoe66u')
    try:
        read_layer(filename)
    except Exception:
        pass
    else:
        msg = 'Exception for unknown extension should have been raised'
        raise Exception(msg)

    filename = unique_filename(suffix='.gml')
    try:
        read_layer(filename)
    except IOError:
        pass
    else:
        msg = 'Exception for non-existing file should have been raised'
        raise Exception(msg)

    # Read and verify test data
    for vectorname in ['lembang_schools.shp',
                       'tsunami_exposure_BB.shp']:

        filename = '%s/%s' % (TESTDATA, vectorname)
        layer = read_layer(filename)
        coords = layer.get_geometry()
        attributes = layer.get_data()

        # Check basic data integrity
        N = len(layer)
        assert coords.shape[0] == N
        assert coords.shape[1] == 2
        assert len(layer) == N

        assert isinstance(layer.get_name(), basestring)

        # Check projection
        wkt = layer.get_projection(proj4=False)
        assert wkt.startswith('GEOGCS')

        assert layer.projection == Projection(DEFAULT_PROJECTION)

        # Check integrity of each feature
        field_names = None
        for i in range(N):
            # Consistency between geometry and fields
            x1 = coords[i, 0]
            x2 = attributes[i]['LONGITUDE']
            assert x2 is not None
            msg = 'Inconsistent longitudes: %f != %f' % (x1, x2)
            assert numpy.allclose(x1, x2), msg

            x1 = coords[i, 1]
            x2 = attributes[i]['LATITUDE']
            assert x2 is not None
            msg = 'Inconsistent latitudes: %f != %f' % (x1, x2)
            assert numpy.allclose(x1, x2), msg

            # Verify that each feature has the same fields
            if field_names is None:
                field_names = attributes[i].keys()
            else:
                assert len(field_names) == len(attributes[i].keys())
                assert field_names == attributes[i].keys()

        # Write data back to file
        # FIXME (Ole): I would like to use gml here, but OGR does not
        #              store the spatial reference!
        out_filename = unique_filename(suffix='.shp')
        write_point_data(attributes, wkt, coords, out_filename)

        # Read again and check
        layer = read_layer(out_filename)
        coords = layer.get_geometry()
        attributes = layer.get_data()

        # Check basic data integrity
        N = len(layer)
        assert coords.shape[0] == N
        assert coords.shape[1] == 2

        # Check projection
        assert layer.projection == Projection(DEFAULT_PROJECTION)

        # Check integrity of each feature
        field_names = None
        for i in range(N):
            # Consistency between geometry and fields
            x1 = coords[i, 0]
            x2 = attributes[i]['LONGITUDE']
            assert x2 is not None
            msg = 'Inconsistent longitudes: %f != %f' % (x1, x2)
            assert numpy.allclose(x1, x2), msg

            x1 = coords[i, 1]
            x2 = attributes[i]['LATITUDE']
            assert x2 is not None
            msg = 'Inconsistent latitudes: %f != %f' % (x1, x2)
            assert numpy.allclose(x1, x2), msg

            # Verify that each feature has the same fields
            if field_names is None:
                field_names = attributes[i].keys()
            else:
                assert len(field_names) == len(attributes[i].keys())
                assert field_names == attributes[i].keys()

        # Test individual extraction
        lon = layer.get_data(attribute='LONGITUDE')
        assert numpy.allclose(lon, coords[:, 0])