def test_io(self):
    """Data can be uploaded and downloaded from internal GeoServer
    """

    # Upload one raster and one vector data set
    for filename in ['population_padang_1.asc', 'lembang_schools.shp']:
        basename, ext = os.path.splitext(filename)
        path = os.path.join(TESTDATA, filename)
        layer = save_to_geonode(path, user=self.user, overwrite=True)

        # The uploaded layer must be named after the file (lower cased)
        layer_name = layer.name
        expected_name = basename.lower()
        msg = 'Expected layername %s but got %s' % (expected_name,
                                                    layer_name)
        assert layer_name == expected_name, msg

        # The layer must live in the default GeoNode workspace
        workspace = layer.workspace
        msg = 'Expected workspace to be "geonode". Got %s' % workspace
        assert workspace == 'geonode'

        # Django layer handle must agree with the file's bounding box
        assert_bounding_box_matches(layer, path)

        # Round-trip: download the layer again via workspace:name
        bbox = get_bounding_box(path)
        downloaded_layer = download(INTERNAL_SERVER_URL,
                                    '%s:%s' % (workspace, layer_name),
                                    bbox)
        assert os.path.exists(downloaded_layer.filename)
def XXtest_shakemap_population_exposure(self):
    """Population exposed to groundshaking matches USGS numbers

    NOTE: disabled via the XX name prefix, so the test runner skips it.
    """

    # Upload hazard raster and build its workspace:name identifier
    hazardfile = os.path.join(TEST_DATA, 'shakemap_sumatra_20110129.tif')
    hazard_layer = save_to_geonode(hazardfile,
                                   overwrite=True,
                                   user=self.user)
    hazard_name = '%s:%s' % (hazard_layer.workspace, hazard_layer.name)

    # Upload exposure raster likewise
    exposurefile = os.path.join(TEST_DATA, 'population_indonesia_2008.tif')
    exposure_layer = save_to_geonode(exposurefile,
                                     overwrite=True,
                                     user=self.user)
    exposure_name = '%s:%s' % (exposure_layer.workspace,
                               exposure_layer.name)

    # Run the calculation through the web API
    #with warnings.catch_warnings():
    #    warnings.simplefilter('ignore')
    client = Client()
    rv = client.post('/api/v1/calculate/',
                     data={'hazard_server': INTERNAL_SERVER_URL,
                           'hazard': hazard_name,
                           'exposure_server': INTERNAL_SERVER_URL,
                           'exposure': exposure_name,
                           'bbox': get_bounding_box_string(hazardfile),
                           'impact_function': 'USGSFatalityFunction',
                           'impact_level': 10,
                           'keywords': 'test,shakemap,usgs'})

    self.assertEqual(rv.status_code, 200)
    self.assertEqual(rv['Content-Type'], 'application/json')
    data = json.loads(rv.content)

    # The response must describe the run and point at a result layer
    for key in ('hazard_layer', 'exposure_layer', 'run_duration',
                'run_date', 'layer'):
        assert key in data.keys()

    # Download result and check it arrived on disk
    layer_name = data['layer'].split('/')[-1]
    result_layer = download(INTERNAL_SERVER_URL,
                            layer_name,
                            get_bounding_box(hazardfile))
    assert os.path.exists(result_layer.filename)

    # Read hazard data for reference
    hazard_raster = read_layer(hazardfile)
    H = hazard_raster.get_data()
    mmi_min, mmi_max = hazard_raster.get_extrema()

    # Read calculated result
    impact_raster = read_layer(result_layer.filename)
    I = impact_raster.get_data()
def test_io(self):
    """Data can be uploaded and downloaded from internal GeoServer
    """

    # Upload a raster and a vector data set
    for filename in ['lembang_mmi_hazmap.asc', 'lembang_schools.shp']:
        basename, ext = os.path.splitext(filename)
        filename = os.path.join(TEST_DATA, filename)
        layer = save_to_geonode(filename, user=self.user)

        # Name checking
        layer_name = layer.name

        workspace = layer.workspace
        msg = 'Expected workspace to be "geonode". Got %s' % workspace
        assert workspace == 'geonode', msg

        # FIX: the original re-asserted ``workspace == 'geonode'`` here
        # with a message about the layer name, so the layer name was
        # never actually verified. Check it against the file basename.
        expected_name = basename.lower()
        msg = ('Expected layer name to be "%s". Got %s'
               % (expected_name, layer_name))
        assert layer_name == expected_name, msg

        # Check metadata
        assert isinstance(layer.geographic_bounding_box, basestring)

        # Extract bounding box from layer handle
        # (a WKT-style 'POLYGON((x y, x y, ...))' string)
        s = 'POLYGON(('
        i = layer.geographic_bounding_box.find(s) + len(s)
        assert i > len(s)

        j = layer.geographic_bounding_box.find('))')
        assert j > i

        bbox_string = str(layer.geographic_bounding_box[i:j])
        A = numpy.array([[float(x[0]), float(x[1])] for x in
                         (p.split() for p in bbox_string.split(','))])
        south = min(A[:, 1])
        north = max(A[:, 1])
        west = min(A[:, 0])
        east = max(A[:, 0])
        bbox = [west, south, east, north]

        # Check correctness of bounding box against reference
        ref_bbox = get_bounding_box(filename)

        msg = ('Bounding box from layer handle "%s" was not as expected.\n'
               'Got %s, expected %s' % (layer_name, bbox, ref_bbox))
        assert numpy.allclose(bbox, ref_bbox), msg

        # Download layer again using workspace:name
        downloaded_layer = download(INTERNAL_SERVER_URL, layer_name, bbox)
        assert os.path.exists(downloaded_layer.filename)
def XXtest_shakemap_population_exposure(self):
    """Population exposed to groundshaking matches USGS numbers

    NOTE: disabled via the XX name prefix, so the test runner skips it.
    """

    # Upload hazard raster and build its workspace:name identifier
    hazardfile = os.path.join(TESTDATA, 'shakemap_sumatra_20110129.tif')
    hazard_layer = save_to_geonode(hazardfile,
                                   overwrite=True,
                                   user=self.user)
    hazard_name = '%s:%s' % (hazard_layer.workspace, hazard_layer.name)

    # Upload exposure raster likewise
    exposurefile = os.path.join(TESTDATA, 'population_indonesia_2008.tif')
    exposure_layer = save_to_geonode(exposurefile,
                                     overwrite=True,
                                     user=self.user)
    exposure_name = '%s:%s' % (exposure_layer.workspace,
                               exposure_layer.name)

    # Run the calculation through the web API
    #with warnings.catch_warnings():
    #    warnings.simplefilter('ignore')
    client = Client()
    rv = client.post('/impact/api/calculate/',
                     data={'hazard_server': INTERNAL_SERVER_URL,
                           'hazard': hazard_name,
                           'exposure_server': INTERNAL_SERVER_URL,
                           'exposure': exposure_name,
                           'bbox': get_bounding_box_string(hazardfile),
                           'impact_function': 'USGSFatalityFunction',
                           'keywords': 'test,shakemap,usgs'})

    self.assertEqual(rv.status_code, 200)
    self.assertEqual(rv['Content-Type'], 'application/json')
    data = json.loads(rv.content)

    # The response must describe the run and point at a result layer
    for key in ('hazard_layer', 'exposure_layer', 'run_duration',
                'run_date', 'layer'):
        assert key in data.keys()

    # Download result and check it arrived on disk
    layer_name = data['layer'].split('/')[-1]
    result_layer = download(INTERNAL_SERVER_URL,
                            layer_name,
                            get_bounding_box(hazardfile))
    assert os.path.exists(result_layer.filename)

    # Read hazard data for reference
    hazard_raster = read_layer(hazardfile)
    H = hazard_raster.get_data()
    mmi_min, mmi_max = hazard_raster.get_extrema()

    # Read calculated result
    impact_raster = read_layer(result_layer.filename)
    I = impact_raster.get_data()
def test_get_bounding_box(self):
    """Bounding box is correctly extracted from file.

    # Reference data:
    gdalinfo Earthquake_Ground_Shaking_clip.tif
    Driver: GTiff/GeoTIFF
    Files: Earthquake_Ground_Shaking_clip.tif
    Size is 345, 263
    Coordinate System is:
    GEOGCS["WGS 84",
        DATUM["WGS_1984",
            SPHEROID["WGS 84",6378137,298.2572235630016,
                AUTHORITY["EPSG","7030"]],
            AUTHORITY["EPSG","6326"]],
        PRIMEM["Greenwich",0],
        UNIT["degree",0.0174532925199433],
        AUTHORITY["EPSG","4326"]]
    Origin = (99.364169565217395,-0.004180608365019)
    Pixel Size = (0.008339130434783,-0.008361216730038)
    Metadata:
      AREA_OR_POINT=Point
      TIFFTAG_XRESOLUTION=1
      TIFFTAG_YRESOLUTION=1
      TIFFTAG_RESOLUTIONUNIT=1 (unitless)
    Image Structure Metadata:
      COMPRESSION=LZW
      INTERLEAVE=BAND
    Corner Coordinates:
    Upper Left  (  99.3641696,  -0.0041806) ( 99d21'51.01"E,  0d 0'15.05"S)
    Lower Left  (  99.3641696,  -2.2031806) ( 99d21'51.01"E,  2d12'11.45"S)
    Upper Right ( 102.2411696,  -0.0041806) (102d14'28.21"E,  0d 0'15.05"S)
    Lower Right ( 102.2411696,  -2.2031806) (102d14'28.21"E,  2d12'11.45"S)
    Center      ( 100.8026696,  -1.1036806) (100d48'9.61"E,  1d 6'13.25"S)
    Band 1 Block=256x256 Type=Float64, ColorInterp=Gray
    """

    # Expected [west, south, east, north] per file (see gdalinfo above)
    ref_bbox = {'tsunami_exposure_BB.shp': [150.124, -35.7856,
                                            150.295, -35.6546],
                'Earthquake_Ground_Shaking_clip.tif': [99.3641696,
                                                       -2.2031806,
                                                       102.2411696,
                                                       -0.0041806]}

    for filename in ['Earthquake_Ground_Shaking_clip.tif',
                     'tsunami_exposure_BB.shp']:
        computed = get_bounding_box(os.path.join(TESTDATA, filename))
        expected = ref_bbox[filename]
        msg = ('Got bbox %s from filename %s, but expected %s '
               % (str(computed), filename, str(expected)))
        assert numpy.allclose(computed, expected), msg
def test_metadata(self):
    """Metadata is retrieved correctly for both raster and vector data
    """

    # Upload test data
    filenames = ['Lembang_Earthquake_Scenario.asc',
                 'Earthquake_Ground_Shaking.asc',
                 'lembang_schools.shp',
                 'Padang_WGS84.shp']
    layers = []
    paths = []
    for filename in filenames:
        basename, ext = os.path.splitext(filename)

        path = os.path.join(TESTDATA, filename)
        layer = save_to_geonode(path, user=self.user, overwrite=True)

        # Record layer and file
        layers.append(layer)
        paths.append(path)

    # Check integrity
    for i, layer in enumerate(layers):

        # Derive expected layer type from the file extension
        if filenames[i].endswith('.shp'):
            layer_type = 'vector'
        elif filenames[i].endswith('.asc'):
            layer_type = 'raster'
        else:
            msg = ('Unknown layer extension in %s. '
                   'Expected .shp or .asc' % filenames[i])
            raise Exception(msg)

        layer_name = '%s:%s' % (layer.workspace, layer.name)
        metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

        assert 'id' in metadata
        assert 'title' in metadata
        assert 'layer_type' in metadata
        assert 'keywords' in metadata
        assert 'bounding_box' in metadata
        assert len(metadata['bounding_box']) == 4

        # Check integrity between Django layer and file
        assert_bounding_box_matches(layer, paths[i])

        # Check integrity between file and OWS metadata
        ref_bbox = get_bounding_box(paths[i])
        msg = ('Bounding box from OWS did not match bounding box '
               'from file. They are\n'
               'From file %s: %s\n'
               'From OWS: %s' % (paths[i], ref_bbox,
                                 metadata['bounding_box']))
        assert numpy.allclose(metadata['bounding_box'], ref_bbox), msg
        assert layer.name == metadata['title']
        assert layer_name == metadata['id']
        assert layer_type == metadata['layer_type']

        # Check keywords
        if layer_type == 'raster':
            category = 'hazard'
            subcategory = 'earthquake'
        elif layer_type == 'vector':
            category = 'exposure'
            subcategory = 'building'
        else:
            msg = 'Unknown layer type %s' % layer_type
            raise Exception(msg)

        keywords = metadata['keywords']

        msg = 'Did not find key "category" in keywords: %s' % keywords
        assert 'category' in keywords, msg

        msg = 'Did not find key "subcategory" in keywords: %s' % keywords
        assert 'subcategory' in keywords, msg

        msg = ('Category keyword %s did not match expected %s'
               % (keywords['category'], category))
        assert category == keywords['category'], msg

        # FIX: the original message interpolated ``category`` instead of
        # ``subcategory``, producing a misleading failure report
        msg = ('Subcategory keyword %s did not match expected %s'
               % (keywords['subcategory'], subcategory))
        assert subcategory == keywords['subcategory'], msg
def test_metadata_twice(self):
    """Layer metadata can be correctly uploaded multiple times
    """

    # This test reproduces ticket #99 by creating new data,
    # uploading twice and verifying metadata

    # Base test data
    filenames = ['Lembang_Earthquake_Scenario.asc',
                 'lembang_schools.shp']

    for org_filename in filenames:
        org_basename, ext = os.path.splitext(os.path.join(TESTDATA,
                                                          org_filename))

        # Copy data to temporary unique name
        basename = unique_filename(dir='/tmp')

        # NOTE(review): /bin/cp via os.system silently ignores failures
        # (e.g. a missing .prj companion file) — apparently deliberate
        # best-effort copying; do not convert to shutil.copy blindly.
        cmd = '/bin/cp %s.keywords %s.keywords' % (org_basename, basename)
        os.system(cmd)

        cmd = '/bin/cp %s.prj %s.prj' % (org_basename, basename)
        os.system(cmd)

        if ext == '.asc':
            layer_type = 'raster'
            filename = '%s.asc' % basename
            cmd = '/bin/cp %s.asc %s' % (org_basename, filename)
            os.system(cmd)
        elif ext == '.shp':
            layer_type = 'vector'
            filename = '%s.shp' % basename
            # A shapefile is a bundle of companion files
            for e in ['shp', 'shx', 'sbx', 'sbn', 'dbf']:
                cmd = '/bin/cp %s.%s %s.%s' % (org_basename, e,
                                               basename, e)
                os.system(cmd)
        else:
            msg = ('Unknown layer extension in %s. '
                   'Expected .shp or .asc' % filename)
            raise Exception(msg)

        # Repeat multiple times
        for i in range(3):

            # Upload
            layer = save_to_geonode(filename, user=self.user,
                                    overwrite=True)

            # Get metadata
            layer_name = '%s:%s' % (layer.workspace, layer.name)
            metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

            # Verify
            assert 'id' in metadata
            assert 'title' in metadata
            assert 'layer_type' in metadata
            assert 'keywords' in metadata
            assert 'bounding_box' in metadata
            assert len(metadata['bounding_box']) == 4

            # Check integrity between Django layer and file
            assert_bounding_box_matches(layer, filename)

            # Check integrity between file and OWS metadata
            ref_bbox = get_bounding_box(filename)
            msg = ('Bounding box from OWS did not match bounding box '
                   'from file. They are\n'
                   'From file %s: %s\n'
                   'From OWS: %s' % (filename, ref_bbox,
                                     metadata['bounding_box']))
            assert numpy.allclose(metadata['bounding_box'],
                                  ref_bbox), msg
            assert layer.name == metadata['title']
            assert layer_name == metadata['id']
            assert layer_type == metadata['layer_type']

            # Check keywords
            if layer_type == 'raster':
                category = 'hazard'
                subcategory = 'earthquake'
            elif layer_type == 'vector':
                category = 'exposure'
                subcategory = 'building'
            else:
                msg = 'Unknown layer type %s' % layer_type
                raise Exception(msg)

            keywords = metadata['keywords']

            msg = 'Did not find key "category" in keywords: %s' % keywords
            assert 'category' in keywords, msg

            msg = ('Did not find key "subcategory" in keywords: %s'
                   % keywords)
            assert 'subcategory' in keywords, msg

            msg = ('Category keyword %s did not match expected %s'
                   % (keywords['category'], category))
            assert category == keywords['category'], msg

            # FIX: the original message interpolated ``category`` instead
            # of ``subcategory``, producing a misleading failure report
            msg = ('Subcategory keyword %s did not match expected %s'
                   % (keywords['subcategory'], subcategory))
            assert subcategory == keywords['subcategory'], msg
def test_metadata(self):
    """Metadata is retrieved correctly for both raster and vector data

    NOTE(review): a method with this exact name is defined earlier in
    this file; Python keeps only the later definition. The duplicates
    should be reconciled.
    """

    # Upload test data
    filenames = ['Lembang_Earthquake_Scenario.asc',
                 'Earthquake_Ground_Shaking.asc',
                 'lembang_schools.shp',
                 'Padang_WGS84.shp']
    layers = []
    paths = []
    for filename in filenames:
        basename, ext = os.path.splitext(filename)

        path = os.path.join(TESTDATA, filename)
        layer = save_to_geonode(path, user=self.user, overwrite=True)

        # Record layer and file
        layers.append(layer)
        paths.append(path)

    # Check integrity
    for i, layer in enumerate(layers):

        # Derive expected layer type from the file extension
        if filenames[i].endswith('.shp'):
            layer_type = 'vector'
        elif filenames[i].endswith('.asc'):
            layer_type = 'raster'
        else:
            msg = ('Unknown layer extension in %s. '
                   'Expected .shp or .asc' % filenames[i])
            raise Exception(msg)

        layer_name = '%s:%s' % (layer.workspace, layer.name)
        metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

        assert 'id' in metadata
        assert 'title' in metadata
        assert 'layer_type' in metadata
        assert 'keywords' in metadata
        assert 'bounding_box' in metadata
        assert len(metadata['bounding_box']) == 4

        # Check integrity between Django layer and file
        assert_bounding_box_matches(layer, paths[i])

        # Check integrity between file and OWS metadata
        ref_bbox = get_bounding_box(paths[i])
        msg = ('Bounding box from OWS did not match bounding box '
               'from file. They are\n'
               'From file %s: %s\n'
               'From OWS: %s' % (paths[i], ref_bbox,
                                 metadata['bounding_box']))
        assert numpy.allclose(metadata['bounding_box'], ref_bbox), msg
        assert layer.name == metadata['title']
        assert layer_name == metadata['id']
        assert layer_type == metadata['layer_type']

        # Check keywords
        if layer_type == 'raster':
            category = 'hazard'
            subcategory = 'earthquake'
        elif layer_type == 'vector':
            category = 'exposure'
            subcategory = 'building'
        else:
            msg = 'Unknown layer type %s' % layer_type
            raise Exception(msg)

        keywords = metadata['keywords']

        msg = 'Did not find key "category" in keywords: %s' % keywords
        assert 'category' in keywords, msg

        msg = 'Did not find key "subcategory" in keywords: %s' % keywords
        assert 'subcategory' in keywords, msg

        msg = ('Category keyword %s did not match expected %s'
               % (keywords['category'], category))
        assert category == keywords['category'], msg

        # FIX: the original message interpolated ``category`` instead of
        # ``subcategory``, producing a misleading failure report
        msg = ('Subcategory keyword %s did not match expected %s'
               % (keywords['subcategory'], subcategory))
        assert subcategory == keywords['subcategory'], msg
def test_metadata_twice(self):
    """Layer metadata can be correctly uploaded multiple times

    NOTE(review): a method with this exact name is defined earlier in
    this file; Python keeps only the later definition. The duplicates
    should be reconciled.
    """

    # This test reproduces ticket #99 by creating new data,
    # uploading twice and verifying metadata

    # Base test data
    filenames = ['Lembang_Earthquake_Scenario.asc',
                 'lembang_schools.shp']

    for org_filename in filenames:
        org_basename, ext = os.path.splitext(os.path.join(TESTDATA,
                                                          org_filename))

        # Copy data to temporary unique name
        basename = unique_filename(dir='/tmp')

        # NOTE(review): /bin/cp via os.system silently ignores failures
        # (e.g. a missing .prj companion file) — apparently deliberate
        # best-effort copying; do not convert to shutil.copy blindly.
        cmd = '/bin/cp %s.keywords %s.keywords' % (org_basename, basename)
        os.system(cmd)

        cmd = '/bin/cp %s.prj %s.prj' % (org_basename, basename)
        os.system(cmd)

        if ext == '.asc':
            layer_type = 'raster'
            filename = '%s.asc' % basename
            cmd = '/bin/cp %s.asc %s' % (org_basename, filename)
            os.system(cmd)
        elif ext == '.shp':
            layer_type = 'vector'
            filename = '%s.shp' % basename
            # A shapefile is a bundle of companion files
            for e in ['shp', 'shx', 'sbx', 'sbn', 'dbf']:
                cmd = '/bin/cp %s.%s %s.%s' % (org_basename, e,
                                               basename, e)
                os.system(cmd)
        else:
            msg = ('Unknown layer extension in %s. '
                   'Expected .shp or .asc' % filename)
            raise Exception(msg)

        # Repeat multiple times
        for i in range(3):

            # Upload
            layer = save_to_geonode(filename, user=self.user,
                                    overwrite=True)

            # Get metadata
            layer_name = '%s:%s' % (layer.workspace, layer.name)
            metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

            # Verify
            assert 'id' in metadata
            assert 'title' in metadata
            assert 'layer_type' in metadata
            assert 'keywords' in metadata
            assert 'bounding_box' in metadata
            assert len(metadata['bounding_box']) == 4

            # Check integrity between Django layer and file
            assert_bounding_box_matches(layer, filename)

            # Check integrity between file and OWS metadata
            ref_bbox = get_bounding_box(filename)
            msg = ('Bounding box from OWS did not match bounding box '
                   'from file. They are\n'
                   'From file %s: %s\n'
                   'From OWS: %s' % (filename, ref_bbox,
                                     metadata['bounding_box']))
            assert numpy.allclose(metadata['bounding_box'],
                                  ref_bbox), msg
            assert layer.name == metadata['title']
            assert layer_name == metadata['id']
            assert layer_type == metadata['layer_type']

            # Check keywords
            if layer_type == 'raster':
                category = 'hazard'
                subcategory = 'earthquake'
            elif layer_type == 'vector':
                category = 'exposure'
                subcategory = 'building'
            else:
                msg = 'Unknown layer type %s' % layer_type
                raise Exception(msg)

            keywords = metadata['keywords']

            msg = 'Did not find key "category" in keywords: %s' % keywords
            assert 'category' in keywords, msg

            msg = ('Did not find key "subcategory" in keywords: %s'
                   % keywords)
            assert 'subcategory' in keywords, msg

            msg = ('Category keyword %s did not match expected %s'
                   % (keywords['category'], category))
            assert category == keywords['category'], msg

            # FIX: the original message interpolated ``category`` instead
            # of ``subcategory``, producing a misleading failure report
            msg = ('Subcategory keyword %s did not match expected %s'
                   % (keywords['subcategory'], subcategory))
            assert subcategory == keywords['subcategory'], msg