def test_keywords_with_colon(self):
    """Keywords and values with colons raise error messages
    """

    # Colon in key
    kwd_filename = unique_filename(suffix='.keywords')
    keywords = {'with_a_colon:in_it': 'value'}  # This one is illegal

    try:
        write_keywords(keywords, kwd_filename)
    except AssertionError:
        pass
    else:
        msg = 'Colon in keywords key %s was not caught' % keywords
        raise Exception(msg)

    # Colon in value
    kwd_filename = unique_filename(suffix='.keywords')
    keywords = {'with_a_colon': 'take: that!'}  # This one is illegal

    try:
        write_keywords(keywords, kwd_filename)
    except AssertionError:
        pass
    else:
        msg = 'Colon in keywords value %s was not caught' % keywords
        raise Exception(msg)
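# The test above implies that write_keywords() validates its input with
# assertions before writing. A minimal sketch of such a guard, assuming a
# format of one 'key: value' pair per line (the real implementation may
# differ; the name write_keywords_sketch and the treatment of None values
# are illustrative assumptions):
def write_keywords_sketch(keywords, filename):
    """Write keyword dictionary to file, one 'key: value' pair per line
    """
    f = open(filename, 'w')
    for k, v in keywords.items():
        msg = 'Key in keywords dictionary must not contain colons: %s' % k
        assert ':' not in k, msg

        msg = ('Value in keywords dictionary must not contain colons: '
               '%s' % v)
        assert v is None or ':' not in str(v), msg

        if v is None:
            # Assumption: keys with no value are written bare
            f.write('%s\n' % k)
        else:
            f.write('%s: %s\n' % (k, v))
    f.close()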
def test_keywords_file(self):
    """Keywords can be written and read
    """

    kwd_filename = unique_filename(suffix='.keywords')
    keywords = {'caption': 'Describing the layer',
                'category': 'impact',
                'subcategory': 'flood',
                'layer': None,
                'with spaces': 'trailing_ws '}

    write_keywords(keywords, kwd_filename)
    msg = 'Keywords file %s was not created' % kwd_filename
    assert os.path.isfile(kwd_filename), msg

    x = read_keywords(kwd_filename)
    os.remove(kwd_filename)

    assert isinstance(x, dict)

    # Check keyword names
    for key in x:
        msg = 'Read unexpected key %s' % key
        assert key in keywords, msg

    for key in keywords:
        msg = 'Expected key %s was not read from %s' % (key,
                                                        kwd_filename)
        assert key in x, msg

    # Check keyword values
    for key in keywords:
        refval = keywords[key]
        newval = x[key]

        if refval is None:
            assert newval is None
        else:
            msg = ('Expected value %s was not read from %s. '
                   'I got %s' % (refval, kwd_filename, newval))
            assert refval.strip() == newval, msg

    # Check catching of wrong extension
    kwd_filename = unique_filename(suffix='.xxxx')
    try:
        write_keywords(keywords, kwd_filename)
    except AssertionError:
        pass
    else:
        msg = 'Should have raised assertion error for wrong extension'
        raise Exception(msg)
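# For reference, the round trip above implies a plain-text file with one
# 'key: value' pair per line, with values stripped of surrounding
# whitespace on reading. A plausible rendering of the dictionary used in
# this test (the exact treatment of None values is an assumption):
#
#   caption: Describing the layer
#   category: impact
#   subcategory: flood
#   layer
#   with spaces: trailing_ws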
def calculate_impact(layers, impact_fcn, comment=''):
    """Calculate impact levels as a function of list of input layers

    Input
        FIXME (Ole): For the moment we take only a list with two
        elements containing one hazard level and one exposure level

        layers: List of Raster and Vector layer objects to be used for
                analysis
        impact_fcn: Function of the form f(layers)
        comment:

    Output
        filename of resulting impact layer (GML). Comment is embedded
        as metadata. Filename is generated from input data and date.

    Note
        The admissible file types are tif and asc/prj for raster and
        gml or shp for vector data

    Assumptions
        1. All layers are in WGS84 geographic coordinates
        2. Layers are equipped with metadata such as names and categories
    """

    # Input checks
    check_data_integrity(layers)

    # Get an instance of the passed impact_fcn
    impact_function = impact_fcn()

    # Pass input layers to plugin
    # FIXME (Ole): When issue #21 has been fully implemented, this
    #              return value should be a list of layers.
    F = impact_function.run(layers)

    # Write result and return filename
    if F.is_raster:
        extension = '.tif'  # use default style for raster
    else:
        extension = '.shp'  # use default style for vector

    output_filename = unique_filename(suffix=extension)
    F.write_to_file(output_filename)

    # Generate style as defined by the impact_function
    style = impact_function.generate_style(F)
    f = open(output_filename.replace(extension, '.sld'), 'w')
    f.write(style)
    f.close()

    return output_filename
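# A minimal usage sketch for calculate_impact, assuming layers have been
# read with read_layer and that EarthquakeFatalityFunction is an available
# impact function class (both the filenames and the class name here are
# illustrative, not part of this module):
#
#   hazard = read_layer('shakemap.tif')
#   exposure = read_layer('population.tif')
#   impact_filename = calculate_impact([hazard, exposure],
#                                      EarthquakeFatalityFunction)
#
# impact_filename points at the written impact layer; a matching .sld
# style file is written alongside it.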
def test_empty_keywords_file(self):
    """Empty keywords can be handled
    """

    kwd_filename = unique_filename(suffix='.keywords')
    write_keywords({}, kwd_filename)

    msg = 'Keywords file %s was not created' % kwd_filename
    assert os.path.isfile(kwd_filename), msg

    x = read_keywords(kwd_filename)
    os.remove(kwd_filename)

    assert isinstance(x, dict)
    assert len(x) == 0
def test_vector_class(self):
    """Consistency of vector class for point data
    """

    # Read data file
    layername = 'lembang_schools.shp'
    filename = '%s/%s' % (TESTDATA, layername)
    V = read_layer(filename)

    # Make a smaller dataset
    V_ref = V.get_topN('FLOOR_AREA', 5)

    geometry = V_ref.get_geometry()
    data = V_ref.get_data()
    projection = V_ref.get_projection()

    # Create new object from test data
    V_new = Vector(data=data, projection=projection, geometry=geometry)

    # Check
    assert V_new == V_ref
    assert not V_new != V_ref

    # Write this new object, read it again and check
    tmp_filename = unique_filename(suffix='.shp')
    V_new.write_to_file(tmp_filename)

    V_tmp = read_layer(tmp_filename)
    assert V_tmp == V_ref
    assert not V_tmp != V_ref

    # Check that equality raises exception when type is wrong
    try:
        V_tmp == Raster()
    except TypeError:
        pass
    else:
        msg = 'Should have raised TypeError'
        raise Exception(msg)
def test_metadata_twice(self):
    """Layer metadata can be correctly uploaded multiple times
    """

    # This test reproduces ticket #99 by creating new data,
    # uploading twice and verifying metadata

    # Base test data
    filenames = ['Lembang_Earthquake_Scenario.asc',
                 'lembang_schools.shp']

    for org_filename in filenames:
        org_basename, ext = os.path.splitext(os.path.join(TESTDATA,
                                                          org_filename))

        # Copy data to temporary unique name
        basename = unique_filename(dir='/tmp')

        cmd = '/bin/cp %s.keywords %s.keywords' % (org_basename,
                                                   basename)
        os.system(cmd)

        cmd = '/bin/cp %s.prj %s.prj' % (org_basename, basename)
        os.system(cmd)

        if ext == '.asc':
            layer_type = 'raster'
            filename = '%s.asc' % basename
            cmd = '/bin/cp %s.asc %s' % (org_basename, filename)
            os.system(cmd)
        elif ext == '.shp':
            layer_type = 'vector'
            filename = '%s.shp' % basename
            for e in ['shp', 'shx', 'sbx', 'sbn', 'dbf']:
                cmd = '/bin/cp %s.%s %s.%s' % (org_basename, e,
                                               basename, e)
                os.system(cmd)
        else:
            msg = ('Unknown layer extension in %s. '
                   'Expected .shp or .asc' % filename)
            raise Exception(msg)

        # Repeat multiple times
        for i in range(3):

            # Upload
            layer = save_to_geonode(filename, user=self.user,
                                    overwrite=True)

            # Get metadata
            layer_name = '%s:%s' % (layer.workspace, layer.name)
            metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

            # Verify
            assert 'id' in metadata
            assert 'title' in metadata
            assert 'layer_type' in metadata
            assert 'keywords' in metadata
            assert 'bounding_box' in metadata
            assert len(metadata['bounding_box']) == 4

            # Check integrity between Django layer and file
            assert_bounding_box_matches(layer, filename)

            # Check integrity between file and OWS metadata
            ref_bbox = get_bounding_box(filename)
            msg = ('Bounding box from OWS did not match bounding box '
                   'from file. They are\n'
                   'From file %s: %s\n'
                   'From OWS: %s' % (filename,
                                     ref_bbox,
                                     metadata['bounding_box']))
            assert numpy.allclose(metadata['bounding_box'],
                                  ref_bbox), msg
            assert layer.name == metadata['title']
            assert layer_name == metadata['id']
            assert layer_type == metadata['layer_type']

            # Check keywords
            if layer_type == 'raster':
                category = 'hazard'
                subcategory = 'earthquake'
            elif layer_type == 'vector':
                category = 'exposure'
                subcategory = 'building'
            else:
                msg = 'Unknown layer type %s' % layer_type
                raise Exception(msg)

            keywords = metadata['keywords']

            msg = 'Did not find key "category" in keywords: %s' % keywords
            assert 'category' in keywords, msg

            msg = ('Did not find key "subcategory" in keywords: %s'
                   % keywords)
            assert 'subcategory' in keywords, msg

            msg = ('Category keyword %s did not match expected %s'
                   % (keywords['category'], category))
            assert category == keywords['category'], msg

            msg = ('Subcategory keyword %s did not match expected %s'
                   % (keywords['subcategory'], subcategory))
            assert subcategory == keywords['subcategory'], msg
def save_file_to_geonode(filename, user=None, title=None,
                         overwrite=True, check_metadata=True,
                         ignore=None):
    """Save a single layer file to local Risiko GeoNode

    Input
        filename: Layer filename of type as defined in LAYER_TYPES
        user: Django User object
        title: String describing the layer.
               If None or '' the filename will be used.
        overwrite: Boolean variable controlling whether existing layers
                   can be overwritten by this operation. Default is True
        check_metadata: Flag controlling whether metadata is verified.
                        If True (default), an exception will be raised
                        if metadata is not available after a number of
                        retries. If False, no check is done making the
                        function faster.
        ignore: Optional filename to skip; if it matches, None is returned
    Output
        layer object
    """

    if ignore is not None and filename == ignore:
        return None

    # Extract fully qualified basename and extension
    basename, extension = os.path.splitext(filename)

    if extension not in LAYER_TYPES:
        msg = ('Invalid file extension in file %s. Valid extensions are '
               '%s' % (filename, str(LAYER_TYPES)))
        raise RisikoException(msg)

    # Use file name to derive title if not specified
    if title is None or title == '':
        title = os.path.split(basename)[-1]

    # Try to find a file with a .keywords extension
    # and create a keywords list from there.
    # It is assumed that the keywords are separated
    # by new lines.
    # Empty keyword lines are ignored (as this causes issues downstream)
    keyword_list = []
    keyword_file = basename + '.keywords'
    if os.path.exists(keyword_file):
        f = open(keyword_file, 'r')
        for line in f.readlines():

            # Ignore blank lines
            raw_keyword = line.strip()
            if raw_keyword == '':
                continue

            # Strip any spaces after or before the colons if present.
            # Lines without a colon are stored verbatim.
            keyword = raw_keyword
            if ':' in raw_keyword:
                keyword = ':'.join([x.strip()
                                    for x in raw_keyword.split(':')])

            # Store keyword
            keyword_list.append(keyword)
        f.close()

    # Take care of file types
    if extension == '.asc':
        # We assume this is an AAIGrid ASCII file such as those
        # generated by ESRI and convert it to Geotiff before uploading.

        # Create temporary tif file for upload and check that the road
        # is clear
        prefix = os.path.split(basename)[-1]
        upload_filename = unique_filename(prefix=prefix, suffix='.tif')
        upload_basename, extension = os.path.splitext(upload_filename)

        # Copy any metadata files to unique filename
        for ext in ['.sld', '.keywords']:
            if os.path.exists(basename + ext):
                cmd = 'cp %s%s %s%s' % (basename, ext,
                                        upload_basename, ext)
                run(cmd)

        # Check that projection file exists
        prjname = basename + '.prj'
        if not os.path.isfile(prjname):
            msg = ('File %s must have a projection file named '
                   '%s' % (filename, prjname))
            raise RisikoException(msg)

        # Convert ASCII file to GeoTIFF
        R = read_layer(filename)
        R.write_to_file(upload_filename)
    else:
        # The specified file is the one to upload
        upload_filename = filename

    # Attempt to upload the layer
    try:
        # Upload
        layer = file_upload(upload_filename,
                            user=user,
                            title=title,
                            keywords=keyword_list,
                            overwrite=overwrite)

        # FIXME (Ole): This is some kind of hack that should be revisited.
        layer.keywords = ' '.join(keyword_list)
        layer.save()
    except GeoNodeException, e:
        # Layer did not upload. Convert GeoNodeException to RisikoException
        raise RisikoException(e)

    return layer
def test_riab_interpolation(self):
    """Interpolation using Raster and Vector objects
    """

    # Create test data
    lon_ul = 100  # Longitude of upper left corner
    lat_ul = 10   # Latitude of upper left corner
    numlon = 8    # Number of longitudes
    numlat = 5    # Number of latitudes
    dlon = 1
    dlat = -1

    # Define array where latitudes are rows and longitude columns
    A = numpy.zeros((numlat, numlon))

    # Establish coordinates for lower left corner
    lat_ll = lat_ul - numlat
    lon_ll = lon_ul

    # Define pixel centers along each direction
    longitudes = numpy.linspace(lon_ll + 0.5,
                                lon_ll + numlon - 0.5,
                                numlon)
    latitudes = numpy.linspace(lat_ll + 0.5,
                               lat_ll + numlat - 0.5,
                               numlat)

    # Define raster with latitudes going bottom-up (south to north).
    # Longitudes go left-right (west to east)
    for i in range(numlat):
        for j in range(numlon):
            A[numlat - 1 - i, j] = linear_function(longitudes[j],
                                                   latitudes[i])

    # Write array to a raster file
    geotransform = (lon_ul, dlon, 0, lat_ul, 0, dlat)
    projection = ('GEOGCS["GCS_WGS_1984",'
                  'DATUM["WGS_1984",'
                  'SPHEROID["WGS_1984",6378137.0,298.257223563]],'
                  'PRIMEM["Greenwich",0.0],'
                  'UNIT["Degree",0.0174532925199433]]')

    raster_filename = unique_filename(suffix='.tif')
    write_raster_data(A,
                      projection,
                      geotransform,
                      raster_filename)

    # Write test interpolation points to a vector file
    coordinates = []
    for xi in longitudes:
        for eta in latitudes:
            coordinates.append((xi, eta))

    vector_filename = unique_filename(suffix='.shp')
    write_vector_data(data=None,
                      projection=projection,
                      geometry=coordinates,
                      filename=vector_filename)

    # Read both datasets back in
    R = read_layer(raster_filename)
    V = read_layer(vector_filename)

    # Then test that axes and data returned by R are correct
    x, y = R.get_geometry()
    msg = 'X axis was %s, should have been %s' % (longitudes, x)
    assert numpy.allclose(longitudes, x), msg
    msg = 'Y axis was %s, should have been %s' % (latitudes, y)
    assert numpy.allclose(latitudes, y), msg
    AA = R.get_data()
    msg = 'Raster data was %s, should have been %s' % (AA, A)
    assert numpy.allclose(AA, A), msg

    # Test riab's interpolation function
    I = R.interpolate(V, name='value')
    Icoordinates = I.get_geometry()
    Iattributes = I.get_data()
    assert numpy.allclose(Icoordinates, coordinates)

    # Test that interpolated points are correct
    for i, (xi, eta) in enumerate(Icoordinates):
        z = Iattributes[i]['value']
        #print xi, eta, z, linear_function(xi, eta)
        assert numpy.allclose(z, linear_function(xi, eta),
                              rtol=1e-12)

    # FIXME (Ole): Need test for values outside grid.
    #              They should be NaN or something

    # Cleanup
    # FIXME (Ole): Shape files are a collection of files. How to remove?
    os.remove(vector_filename)
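# Both interpolation tests in this file, and test_rasters_and_arrays
# below, call linear_function without defining it here. A definition
# consistent with all four corner values asserted in
# test_rasters_and_arrays (e.g. linear_function(100.5, 9.5) == 105.25)
# would be:
def linear_function(x, y):
    """Auxiliary linear test function of longitude and latitude
    """
    return x + y / 2.0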
def save_file_to_geonode(filename, user=None, title=None,
                         overwrite=True, check_metadata=True,
                         ignore=None):
    """Save a single layer file to local Risiko GeoNode

    Input
        filename: Layer filename of type as defined in LAYER_TYPES
        user: Django User object
        title: String describing the layer.
               If None or '' the filename will be used.
        overwrite: Boolean variable controlling whether existing layers
                   can be overwritten by this operation. Default is True
        check_metadata: Flag controlling whether metadata is verified.
                        If True (default), an exception will be raised
                        if metadata is not available after a number of
                        retries. If False, no check is done making the
                        function faster.
        ignore: Optional filename to skip; if it matches, None is returned
    Output
        layer object
    """

    if ignore is not None and filename == ignore:
        return None

    # Extract fully qualified basename and extension
    basename, extension = os.path.splitext(filename)

    if extension not in LAYER_TYPES:
        msg = ('Invalid file extension in file %s. Valid extensions are '
               '%s' % (filename, str(LAYER_TYPES)))
        raise RisikoException(msg)

    # Use file name to derive title if not specified
    if title is None or title == '':
        title = os.path.split(basename)[-1]

    # Try to find a file with a .keywords extension
    # and create a keywords list from there.
    # It is assumed that the keywords are separated
    # by new lines.
    # Empty keyword lines are ignored (as this causes issues downstream)
    keyword_list = []
    keyword_file = basename + '.keywords'
    if os.path.exists(keyword_file):
        f = open(keyword_file, 'r')
        for line in f.readlines():

            # Ignore blank lines
            raw_keyword = line.strip()
            if raw_keyword == '':
                continue

            # Strip any spaces after or before the colons if present.
            # Lines without a colon are stored verbatim.
            keyword = raw_keyword
            if ':' in raw_keyword:
                keyword = ':'.join([x.strip()
                                    for x in raw_keyword.split(':')])

            # FIXME (Ole): Replace spaces by underscores and store
            #              keyword. See issue #148
            keyword_list.append(keyword.replace(' ', '_'))
        f.close()

    # Take care of file types
    if extension == '.asc':
        # We assume this is an AAIGrid ASCII file such as those
        # generated by ESRI and convert it to Geotiff before uploading.

        # Create temporary tif file for upload and check that the road
        # is clear
        prefix = os.path.split(basename)[-1]
        upload_filename = unique_filename(prefix=prefix, suffix='.tif')
        upload_basename, extension = os.path.splitext(upload_filename)

        # Copy any metadata files to unique filename
        for ext in ['.sld', '.keywords']:
            if os.path.exists(basename + ext):
                cmd = 'cp %s%s %s%s' % (basename, ext,
                                        upload_basename, ext)
                run(cmd)

        # Check that projection file exists
        prjname = basename + '.prj'
        if not os.path.isfile(prjname):
            msg = ('File %s must have a projection file named '
                   '%s' % (filename, prjname))
            raise RisikoException(msg)

        # Convert ASCII file to GeoTIFF
        R = read_layer(filename)
        R.write_to_file(upload_filename)
    else:
        # The specified file is the one to upload
        upload_filename = filename

    # Attempt to upload the layer
    try:
        # Upload
        layer = file_upload(upload_filename,
                            user=user,
                            title=title,
                            keywords=keyword_list,
                            overwrite=overwrite)

        # FIXME (Ole): This workaround should be revisited.
        #              This fix means that keywords can't have spaces.
        #              Really need a generic way of getting this kind
        #              of info in and out of GeoNode
        layer.keywords = ' '.join(keyword_list)
        layer.save()
    except GeoNodeException, e:
        # Layer did not upload. Convert GeoNodeException to RisikoException
        raise RisikoException(e)

    return layer
def save_file_to_geonode(filename, user=None, title=None,
                         overwrite=False):
    """Save a single layer file to local Risiko GeoNode

    Input
        filename: Layer filename of type as defined in LAYER_TYPES
        user: Django User object
        title: String describing the layer.
               If None or '' the filename will be used.
        overwrite: Boolean variable controlling whether existing layers
                   can be overwritten by this operation.
                   Default is False
    Output
        layer object
    """

    # Extract fully qualified basename and extension
    basename, extension = os.path.splitext(filename)

    if extension not in LAYER_TYPES:
        msg = ('Invalid file extension in file %s. Valid extensions are '
               '%s' % (filename, str(LAYER_TYPES)))
        raise RisikoException(msg)

    # Use file name to derive title if not specified
    if title is None or title == '':
        title = os.path.split(basename)[-1]

    # Try to find a file with a .keywords extension
    # and create a keywords list from there.
    # It is assumed that the keywords are separated
    # by new lines.
    keyword_list = []
    keyword_file = basename + '.keywords'
    if os.path.exists(keyword_file):
        f = open(keyword_file, 'r')
        for line in f.readlines():
            # Strip any spaces after or before the colons if present.
            # Lines without a colon are stored verbatim.
            raw_keyword = line.strip()
            keyword = raw_keyword
            if ':' in raw_keyword:
                keyword = ':'.join([x.strip()
                                    for x in raw_keyword.split(':')])
            keyword_list.append(keyword)
        f.close()

    # Take care of file types
    if extension == '.asc':
        # We assume this is an AAIGrid ASCII file such as those
        # generated by ESRI and convert it to Geotiff before uploading.

        # Create temporary tif file for upload and check that the road
        # is clear
        # FIXME (Ole): for some reason, these files tend to hang around
        # - especially after interrupts so we'll go for temporary
        # filenames for the time being.
        prefix = os.path.split(basename)[-1]
        upload_filename = unique_filename(prefix=prefix, suffix='.tif')
        upload_basename, extension = os.path.splitext(upload_filename)

        # Copy any metadata files to unique filename
        for ext in ['.sld', '.keywords']:
            if os.path.exists(basename + ext):
                cmd = 'cp %s%s %s%s' % (basename, ext,
                                        upload_basename, ext)
                run(cmd)

        #msg = ('You have asked to upload the ASCII file "%s" and to do so I '
        #       'must first convert it to the TIF format. However, there is '
        #       'already a file named "%s" so you have to remove that first '
        #       'and try again. Sorry about that.' % (filename,
        #                                             upload_filename))
        #assert not os.path.exists(upload_filename), msg

        # Check that projection file exists
        prjname = basename + '.prj'
        if not os.path.isfile(prjname):
            msg = ('File %s must have a projection file named '
                   '%s' % (filename, prjname))
            raise RisikoException(msg)

        # Convert ASCII file to GeoTIFF
        cmd = ('gdal_translate -ot Float64 -of GTiff -co "PROFILE=GEOTIFF" '
               '%s %s' % (filename, upload_filename))
        run(cmd,
            stdout='%s_asc2tif_conversion.stdout' % basename,
            stderr='%s_asc2tif_conversion.stderr' % basename)
    else:
        # The specified file is the one to upload
        upload_filename = filename

    # Attempt to upload the layer
    try:
        # Upload
        layer = file_upload(upload_filename,
                            user=user,
                            title=title,
                            keywords=keyword_list,
                            overwrite=overwrite)
    except GeoNodeException, e:
        # Layer did not upload. Convert GeoNodeException to RisikoException
        raise RisikoException(e)

    return layer
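# A minimal usage sketch for the save_file_to_geonode variants above,
# assuming a Django user is available (the username and file path are
# illustrative):
#
#   from django.contrib.auth.models import User
#   user = User.objects.get(username='admin')
#   layer = save_file_to_geonode('/tmp/shakemap_padang_20090930.asc',
#                                user=user, overwrite=True)
#
# For an .asc input, the function converts to GeoTIFF first; any sidecar
# .keywords and .sld files next to the input are carried along.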
def test_reading_and_writing_of_vector_data(self):
    """Vector data can be read and written correctly
    """

    # First test that some error conditions are caught
    filename = unique_filename(suffix='nshoe66u')
    try:
        read_layer(filename)
    except Exception:
        pass
    else:
        msg = 'Exception for unknown extension should have been raised'
        raise Exception(msg)

    filename = unique_filename(suffix='.gml')
    try:
        read_layer(filename)
    except IOError:
        pass
    else:
        msg = 'Exception for non-existing file should have been raised'
        raise Exception(msg)

    # Read and verify test data
    for vectorname in ['lembang_schools.shp',
                       'tsunami_exposure_BB.shp']:

        filename = '%s/%s' % (TESTDATA, vectorname)
        layer = read_layer(filename)
        coords = layer.get_geometry()
        attributes = layer.get_data()

        # Check basic data integrity
        N = len(layer)
        assert coords.shape[0] == N
        assert coords.shape[1] == 2
        assert len(layer) == N
        assert isinstance(layer.get_name(), basestring)

        # Check projection
        wkt = layer.get_projection(proj4=False)
        assert wkt.startswith('GEOGCS')

        assert layer.projection == Projection(DEFAULT_PROJECTION)

        # Check integrity of each feature
        field_names = None
        for i in range(N):
            # Consistency between geometry and fields
            x1 = coords[i, 0]
            x2 = attributes[i]['LONGITUDE']
            assert x2 is not None
            msg = 'Inconsistent longitudes: %f != %f' % (x1, x2)
            assert numpy.allclose(x1, x2), msg

            x1 = coords[i, 1]
            x2 = attributes[i]['LATITUDE']
            assert x2 is not None
            msg = 'Inconsistent latitudes: %f != %f' % (x1, x2)
            assert numpy.allclose(x1, x2), msg

            # Verify that each feature has the same fields
            if field_names is None:
                field_names = attributes[i].keys()
            else:
                assert len(field_names) == len(attributes[i].keys())
                assert field_names == attributes[i].keys()

        # Write data back to file
        # FIXME (Ole): I would like to use gml here, but OGR does not
        #              store the spatial reference!
        out_filename = unique_filename(suffix='.shp')
        write_point_data(attributes, wkt, coords, out_filename)

        # Read again and check
        layer = read_layer(out_filename)
        coords = layer.get_geometry()
        attributes = layer.get_data()

        # Check basic data integrity
        N = len(layer)
        assert coords.shape[0] == N
        assert coords.shape[1] == 2

        # Check projection
        assert layer.projection == Projection(DEFAULT_PROJECTION)

        # Check integrity of each feature
        field_names = None
        for i in range(N):
            # Consistency between geometry and fields
            x1 = coords[i, 0]
            x2 = attributes[i]['LONGITUDE']
            assert x2 is not None
            msg = 'Inconsistent longitudes: %f != %f' % (x1, x2)
            assert numpy.allclose(x1, x2), msg

            x1 = coords[i, 1]
            x2 = attributes[i]['LATITUDE']
            assert x2 is not None
            msg = 'Inconsistent latitudes: %f != %f' % (x1, x2)
            assert numpy.allclose(x1, x2), msg

            # Verify that each feature has the same fields
            if field_names is None:
                field_names = attributes[i].keys()
            else:
                assert len(field_names) == len(attributes[i].keys())
                assert field_names == attributes[i].keys()

        # Test individual extraction
        lon = layer.get_data(attribute='LONGITUDE')
        assert numpy.allclose(lon, coords[:, 0])
def test_rasters_and_arrays(self):
    """Consistency of rasters and associated arrays
    """

    # Create test data
    lon_ul = 100  # Longitude of upper left corner
    lat_ul = 10   # Latitude of upper left corner
    numlon = 8    # Number of longitudes
    numlat = 5    # Number of latitudes
    dlon = 1
    dlat = -1

    # Define array where latitudes are rows and longitude columns
    A1 = numpy.zeros((numlat, numlon))

    # Establish coordinates for lower left corner
    lat_ll = lat_ul - numlat
    lon_ll = lon_ul

    # Define pixel centers along each direction
    lon = numpy.linspace(lon_ll + 0.5, lon_ll + numlon - 0.5, numlon)
    lat = numpy.linspace(lat_ll + 0.5, lat_ll + numlat - 0.5, numlat)

    # Define raster with latitudes going bottom-up (south to north).
    # Longitudes go left-right (west to east)
    for i in range(numlat):
        for j in range(numlon):
            A1[numlat - 1 - i, j] = linear_function(lon[j], lat[i])

    # Upper left corner
    assert A1[0, 0] == 105.25
    assert A1[0, 0] == linear_function(lon[0], lat[4])

    # Lower left corner
    assert A1[4, 0] == 103.25
    assert A1[4, 0] == linear_function(lon[0], lat[0])

    # Upper right corner
    assert A1[0, 7] == 112.25
    assert A1[0, 7] == linear_function(lon[7], lat[4])

    # Lower right corner
    assert A1[4, 7] == 110.25
    assert A1[4, 7] == linear_function(lon[7], lat[0])

    # Generate raster object and write
    projection = ('GEOGCS["WGS 84",'
                  'DATUM["WGS_1984",'
                  'SPHEROID["WGS 84",6378137,298.2572235630016,'
                  'AUTHORITY["EPSG","7030"]],'
                  'AUTHORITY["EPSG","6326"]],'
                  'PRIMEM["Greenwich",0],'
                  'UNIT["degree",0.0174532925199433],'
                  'AUTHORITY["EPSG","4326"]]')
    geotransform = (lon_ul, dlon, 0, lat_ul, 0, dlat)
    R1 = Raster(A1, projection, geotransform)

    msg = ('Dimensions of raster array do not match those of '
           'raster object')
    assert numlat == R1.rows, msg
    assert numlon == R1.columns, msg

    # Write back to new (tif) file
    out_filename = unique_filename(suffix='.tif')
    R1.write_to_file(out_filename)

    # Read again and check consistency
    R2 = read_layer(out_filename)

    msg = ('Dimensions of written raster array do not match those '
           'of input raster file\n')
    msg += ('    Dimensions of input file '
            '%s: (%s, %s)\n' % (R1.filename, numlat, numlon))
    msg += ('    Dimensions of output file %s: '
            '(%s, %s)' % (R2.filename, R2.rows, R2.columns))
    assert numlat == R2.rows, msg
    assert numlon == R2.columns, msg

    A2 = R2.get_data()

    assert numpy.allclose(numpy.min(A1), numpy.min(A2))
    assert numpy.allclose(numpy.max(A1), numpy.max(A2))

    msg = 'Array values of written raster array were not as expected'
    assert numpy.allclose(A1, A2), msg

    msg = 'Geotransforms were different'
    assert R1.get_geotransform() == R2.get_geotransform(), msg

    p1 = R1.get_projection(proj4=True)
    p2 = R2.get_projection(proj4=True)
    msg = 'Projections were different: %s != %s' % (p1, p2)
    assert p1 == p2, msg

    # Exercise projection __eq__ method
    assert R1.projection == R2.projection

    # Check that equality raises exception when type is wrong
    try:
        R1.projection == 234
    except TypeError:
        pass
    else:
        msg = 'Should have raised TypeError'
        raise Exception(msg)
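# The geotransform tuples used above follow the GDAL convention
# (top_left_x, pixel_width, x_rotation, top_left_y, y_rotation,
# pixel_height), so with no rotation the centre of the cell in row i,
# column j sits at
#
#   lon = top_left_x + (j + 0.5) * pixel_width
#   lat = top_left_y + (i + 0.5) * pixel_height
#
# With pixel_height = dlat = -1, row 0 is the northernmost row, which is
# why the loops above fill A1[numlat - 1 - i, j] to flip the
# south-to-north latitude axis into image order.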
def test_reading_and_writing_of_real_rasters(self):
    """Rasters can be read and written correctly
    """

    for rastername in ['Earthquake_Ground_Shaking_clip.tif',
                       'Population_2010_clip.tif',
                       'shakemap_padang_20090930.asc',
                       'population_padang_1.asc',
                       'population_padang_2.asc']:

        filename = '%s/%s' % (TESTDATA, rastername)
        R1 = read_layer(filename)

        # Check consistency of raster
        A1 = R1.get_data()
        M, N = A1.shape

        msg = ('Dimensions of raster array do not match those of '
               'raster file %s' % R1.filename)
        assert M == R1.rows, msg
        assert N == R1.columns, msg

        # Write back to new (tif) file
        out_filename = unique_filename(suffix='.tif')
        write_raster_data(A1,
                          R1.get_projection(),
                          R1.get_geotransform(),
                          out_filename)

        # Read again and check consistency
        R2 = read_layer(out_filename)

        msg = ('Dimensions of written raster array do not match those '
               'of input raster file\n')
        msg += ('    Dimensions of input file '
                '%s: (%s, %s)\n' % (R1.filename, M, N))
        msg += ('    Dimensions of output file %s: '
                '(%s, %s)' % (R2.filename, R2.rows, R2.columns))
        assert M == R2.rows, msg
        assert N == R2.columns, msg

        A2 = R2.get_data()

        assert numpy.allclose(numpy.min(A1), numpy.min(A2))
        assert numpy.allclose(numpy.max(A1), numpy.max(A2))

        msg = 'Array values of written raster array were not as expected'
        assert numpy.allclose(A1, A2), msg

        msg = 'Geotransforms were different'
        assert R1.get_geotransform() == R2.get_geotransform(), msg

        p1 = R1.get_projection(proj4=True)
        p2 = R2.get_projection(proj4=True)
        msg = 'Projections were different: %s != %s' % (p1, p2)
        assert p1 == p2, msg

        # Use overridden == and != to verify
        assert R1 == R2
        assert not R1 != R2

        # Check that equality raises exception when type is wrong
        try:
            R1 == Vector()
        except TypeError:
            pass
        else:
            msg = 'Should have raised TypeError'
            raise Exception(msg)
def test_riab_interpolation(self):
    """Interpolation using Raster and Vector objects
    """

    # Create test data
    lon_ul = 100  # Longitude of upper left corner
    lat_ul = 10   # Latitude of upper left corner
    numlon = 8    # Number of longitudes
    numlat = 5    # Number of latitudes
    dlon = 1
    dlat = -1

    # Define array where latitudes are rows and longitude columns
    A = numpy.zeros((numlat, numlon))

    # Establish coordinates for lower left corner
    lat_ll = lat_ul - numlat
    lon_ll = lon_ul

    # Define pixel centers along each direction
    longitudes = numpy.linspace(lon_ll + 0.5,
                                lon_ll + numlon - 0.5,
                                numlon)
    latitudes = numpy.linspace(lat_ll + 0.5,
                               lat_ll + numlat - 0.5,
                               numlat)

    # Define raster with latitudes going bottom-up (south to north).
    # Longitudes go left-right (west to east)
    for i in range(numlat):
        for j in range(numlon):
            A[numlat - 1 - i, j] = linear_function(longitudes[j],
                                                   latitudes[i])

    # Create bilinear interpolation function
    F = raster_spline(longitudes, latitudes, A)

    # Write array to a raster file
    geotransform = (lon_ul, dlon, 0, lat_ul, 0, dlat)
    projection = ('GEOGCS["GCS_WGS_1984",'
                  'DATUM["WGS_1984",'
                  'SPHEROID["WGS_1984",6378137.0,298.257223563]],'
                  'PRIMEM["Greenwich",0.0],'
                  'UNIT["Degree",0.0174532925199433]]')

    raster_filename = unique_filename(suffix='.tif')
    write_raster_data(A,
                      projection,
                      geotransform,
                      raster_filename)

    # Write test interpolation points to a vector file
    coordinates = []
    for xi in longitudes:
        for eta in latitudes:
            coordinates.append((xi, eta))

    vector_filename = unique_filename(suffix='.shp')
    write_point_data(data=None,
                     projection=projection,
                     geometry=coordinates,
                     filename=vector_filename)

    # Read both datasets back in
    R = read_layer(raster_filename)
    V = read_layer(vector_filename)

    # Then test that axes and data returned by R are correct
    x, y = R.get_geometry()
    msg = 'X axis was %s, should have been %s' % (longitudes, x)
    assert numpy.allclose(longitudes, x), msg
    msg = 'Y axis was %s, should have been %s' % (latitudes, y)
    assert numpy.allclose(latitudes, y), msg
    AA = R.get_data()
    msg = 'Raster data was %s, should have been %s' % (AA, A)
    assert numpy.allclose(AA, A), msg

    # Test riab's interpolation function
    I = R.interpolate(V, name='value')
    Icoordinates = I.get_geometry()
    Iattributes = I.get_data()
    assert numpy.allclose(Icoordinates, coordinates)

    # Test that interpolated points are correct
    for i, (xi, eta) in enumerate(Icoordinates):
        z = Iattributes[i]['value']
        #print xi, eta, z, linear_function(xi, eta)
        assert numpy.allclose(z, linear_function(xi, eta),
                              rtol=1e-12)

    # FIXME (Ole): Need test for values outside grid.
    #              They should be NaN or something

    # Cleanup
    # FIXME (Ole): Shape files are a collection of files. How to remove?
    os.remove(vector_filename)
def test_reading_and_writing_of_vector_polygon_data(self):
    """Vector polygon data can be read and written correctly
    """

    # Read and verify test data
    vectorname = 'kecamatan_geo.shp'

    filename = '%s/%s' % (TESTDATA, vectorname)
    layer = read_layer(filename)
    geometry = layer.get_geometry()
    attributes = layer.get_data()

    # Check basic data integrity
    N = len(layer)

    assert len(geometry) == N
    assert len(attributes) == N
    assert len(attributes[0]) == 8

    for i in range(N):
        assert geometry[i].shape[0] > 0
        assert geometry[i].shape[1] == 2

    assert isinstance(layer.get_name(), basestring)

    # Check projection
    wkt = layer.get_projection(proj4=False)
    assert wkt.startswith('GEOGCS')

    assert layer.projection == Projection(DEFAULT_PROJECTION)

    # Check integrity of each feature
    expected_features = {13: {'AREA': 28760732,
                              'POP_2007': 255383,
                              'KECAMATAN': 'kali deres',
                              'KEPADATAN': 60,
                              'PROPINSI': 'DKI JAKARTA'},
                         21: {'AREA': 13155073,
                              'POP_2007': 247747,
                              'KECAMATAN': 'kramat jati',
                              'KEPADATAN': 150,
                              'PROPINSI': 'DKI JAKARTA'},
                         35: {'AREA': 4346540,
                              'POP_2007': 108274,
                              'KECAMATAN': 'senen',
                              'KEPADATAN': 246,
                              'PROPINSI': 'DKI JAKARTA'}}

    field_names = None
    for i in range(N):
        # Consistency with attributes read manually with qgis
        if i in expected_features:
            att = attributes[i]
            exp = expected_features[i]

            for key in exp:
                msg = ('Expected attribute %s was not found in feature %i'
                       % (key, i))
                assert key in att, msg

                a = att[key]
                e = exp[key]
                msg = 'Got %s: "%s" but expected "%s"' % (key, a, e)
                assert a == e, msg

    # Write data back to file
    # FIXME (Ole): I would like to use gml here, but OGR does not
    #              store the spatial reference! Ticket #18
    out_filename = unique_filename(suffix='.shp')
    write_vector_data(attributes, wkt, geometry, out_filename)

    # Read again and check
    layer = read_layer(out_filename)
    geometry_new = layer.get_geometry()
    attributes_new = layer.get_data()

    N = len(layer)
    assert len(geometry_new) == N
    assert len(attributes_new) == N

    for i in range(N):
        assert numpy.allclose(geometry[i],
                              geometry_new[i],
                              rtol=1.0e-6)  # OGR works in single precision

        assert len(attributes_new[i]) == 8
        for key in attributes_new[i]:
            assert attributes_new[i][key] == attributes[i][key]