def test_non_existing_file(self):
    """RisikoException is raised for a non existing file
    """
    sampletxt = os.path.join(TEST_DATA, 'smoothoperator.shp')
    # FIX: the original wrapped the call in try/except-pass, so the
    # test silently passed even when no exception was raised.
    # assertRaises fails the test if RisikoException does not occur.
    self.assertRaises(RisikoException,
                      save_to_geonode, sampletxt, user=self.user)
def test_extension_not_implemented(self):
    """RisikoException is raised for not compatible extensions
    """
    # A .dbf on its own is not an uploadable layer type
    sampletxt = os.path.join(TEST_DATA,
                             'lembang_schools_percentage_loss.dbf')
    # FIX: the original try/except-pass could never fail; the test
    # must fail when the incompatible extension is NOT rejected.
    self.assertRaises(RisikoException,
                      save_to_geonode, sampletxt, user=self.user)
def XXtest_shakemap_population_exposure(self):
    """Population exposed to groundshaking matches USGS numbers

    NOTE(review): disabled via the XX prefix, so the test runner skips
    it. The body ends after reading the result raster without any
    assertion on the data - presumably unfinished; confirm intent
    before re-enabling.
    """
    # Upload the hazard (shakemap) raster
    hazardfile = os.path.join(TEST_DATA, 'shakemap_sumatra_20110129.tif')
    hazard_layer = save_to_geonode(hazardfile, overwrite=True,
                                   user=self.user)
    hazard_name = '%s:%s' % (hazard_layer.workspace, hazard_layer.name)

    # Upload the exposure (population) raster
    exposurefile = os.path.join(TEST_DATA, 'population_indonesia_2008.tif')
    exposure_layer = save_to_geonode(exposurefile, overwrite=True,
                                     user=self.user)
    exposure_name = '%s:%s' % (exposure_layer.workspace,
                               exposure_layer.name)

    # Run the impact calculation through the web API
    #with warnings.catch_warnings():
    #    warnings.simplefilter('ignore')
    c = Client()
    rv = c.post('/api/v1/calculate/', data=dict(
            hazard_server=INTERNAL_SERVER_URL,
            hazard=hazard_name,
            exposure_server=INTERNAL_SERVER_URL,
            exposure=exposure_name,
            bbox=get_bounding_box_string(hazardfile),
            impact_function='USGSFatalityFunction',
            impact_level=10,
            keywords='test,shakemap,usgs',
            ))

    # The API must answer with a JSON document describing the run
    self.assertEqual(rv.status_code, 200)
    self.assertEqual(rv['Content-Type'], 'application/json')
    data = json.loads(rv.content)
    assert 'hazard_layer' in data.keys()
    assert 'exposure_layer' in data.keys()
    assert 'run_duration' in data.keys()
    assert 'run_date' in data.keys()
    assert 'layer' in data.keys()

    # Download result and check it exists on disk
    layer_name = data['layer'].split('/')[-1]

    result_layer = download(INTERNAL_SERVER_URL,
                            layer_name,
                            get_bounding_box(hazardfile))
    assert os.path.exists(result_layer.filename)

    # Read hazard data for reference
    hazard_raster = read_layer(hazardfile)
    H = hazard_raster.get_data()
    mmi_min, mmi_max = hazard_raster.get_extrema()

    # Read calculated result
    # NOTE(review): H, mmi_min/mmi_max and I are never compared - the
    # USGS-number check promised by the docstring is not implemented.
    impact_raster = read_layer(result_layer.filename)
    I = impact_raster.get_data()
def test_repeated_upload(self):
    """The same file can be uploaded more than once
    """
    gridfile = os.path.join(TEST_DATA, 'test_grid.asc')

    # Two uploads with overwrite=True, then one with overwrite=False
    first = save_to_geonode(gridfile, overwrite=True, user=self.user)
    check_layer(first)
    second = save_to_geonode(gridfile, overwrite=True, user=self.user)
    check_layer(second)
    third = save_to_geonode(gridfile, overwrite=False, user=self.user)
    check_layer(third)

    # Overwriting keeps the original layer name
    msg = ('Expected %s but got %s' % (first.name, second.name))
    assert first.name == second.name, msg

    # Not overwriting must produce a fresh name
    msg = ('Expected a different name when uploading %s using '
           'overwrite=False but got %s' % (gridfile, third.name))
    assert first.name != third.name, msg
def test_non_existing_dir(self):
    """RisikoException is raised for a non existing dir
    """
    sampledir = os.path.join(TEST_DATA, 'smoothoperator')
    # FIX: the original looped over the (never produced) result and
    # swallowed the exception with pass, so the test could not fail.
    self.assertRaises(RisikoException,
                      save_to_geonode, sampledir, user=self.user)
def test_shapefile_without_prj(self):
    """Shapefile without a prj file is rejected
    """
    thefile = os.path.join(TEST_DATA,
                           'lembang_schools_percentage_loss.shp')
    # FIX: the original try/except-pass passed even if the projection-less
    # shapefile was accepted; the rejection must be asserted.
    self.assertRaises(RisikoException,
                      save_to_geonode, thefile, user=self.user)
def test_asciifile_without_prj(self):
    """ASCII file without a prj file is rejected
    """
    thefile = os.path.join(TEST_DATA,
                           'grid_without_projection.asc')
    # FIX: the original try/except-pass passed even if the projection-less
    # ASCII grid was accepted; the rejection must be asserted.
    self.assertRaises(RisikoException,
                      save_to_geonode, thefile, user=self.user)
def test_io(self):
    """Data can be uploaded and downloaded from internal GeoServer
    """
    # Upload a raster and a vector data set
    for filename in ['lembang_mmi_hazmap.asc', 'lembang_schools.shp']:
        basename, ext = os.path.splitext(filename)
        filename = os.path.join(TEST_DATA, filename)

        layer = save_to_geonode(filename, user=self.user)

        # Name checking
        layer_name = layer.name
        workspace = layer.workspace
        msg = 'Expected workspace to be "geonode". Got %s' % workspace
        assert workspace == 'geonode', msg

        # FIX: the original re-asserted workspace == 'geonode' here
        # (copy-paste), never actually checking the layer name.
        # GeoNode derives the layer name from the file's basename.
        msg = ('Expected layer name to be "%s". Got %s'
               % (basename, layer_name))
        assert layer_name == basename, msg

        # Check metadata
        assert isinstance(layer.geographic_bounding_box, basestring)

        # Extract bounding box from layer handle
        # (WKT of the form 'POLYGON((x y, x y, ...))')
        s = 'POLYGON(('
        i = layer.geographic_bounding_box.find(s) + len(s)
        assert i > len(s)

        j = layer.geographic_bounding_box.find('))')
        assert j > i

        bbox_string = str(layer.geographic_bounding_box[i:j])
        A = numpy.array([[float(x[0]), float(x[1])] for x in
                         (p.split() for p in bbox_string.split(','))])
        south = min(A[:, 1])
        north = max(A[:, 1])
        west = min(A[:, 0])
        east = max(A[:, 0])
        bbox = [west, south, east, north]

        # Check correctness of bounding box against reference
        ref_bbox = get_bounding_box(filename)

        msg = ('Bounding box from layer handle "%s" was not as expected.\n'
               'Got %s, expected %s' % (layer_name, bbox, ref_bbox))
        assert numpy.allclose(bbox, ref_bbox), msg

        # Download layer again using workspace:name
        downloaded_layer = download(INTERNAL_SERVER_URL, layer_name, bbox)
        assert os.path.exists(downloaded_layer.filename)
def test_plugin_selection(self):
    """Verify the plugins can recognize compatible layers.
    """
    # Upload a raster hazard and a vector exposure data set
    hazard_path = os.path.join(DEMO_DATA, 'hazard',
                               'Lembang_Earthquake_Scenario.asc')
    hazard_layer = save_to_geonode(hazard_path)

    exposure_path = os.path.join(DEMO_DATA, 'exposure',
                                 'AIBEP_schools.shp')
    exposure_layer = save_to_geonode(exposure_path)

    # Ask the API for the list of available impact functions
    client = Client()
    rv = client.post('/api/v1/functions/', data={})

    self.assertEqual(rv.status_code, 200)
    self.assertEqual(rv['Content-Type'], 'application/json')
    data = json.loads(rv.content)
    assert 'functions' in data

    #FIXME (Ariel): This test should implement an alternative function to
    # parse the requirements, but for now it will just take the buildings
    # damage one.
    for function in data['functions']:
        if function['name'] != 'Earthquake School Damage Function':
            continue
        layers = function['layers']

        msg_tmpl = 'Expected layer %s in list of compatible layers: %s'

        #FIXME: We have to compare by name or typename depending on whether it
        # is a raster or a vector. That is sad :'(
        hazard_msg = msg_tmpl % (hazard_layer.name, layers)
        assert hazard_layer.name in layers, hazard_msg

        exposure_msg = msg_tmpl % (exposure_layer.typename, layers)
        assert exposure_layer.typename in layers, exposure_msg
def test_cleanup(self):
    """Cleanup functions in the utils module work
    """
    from geonode.maps.utils import cleanup

    thefile = os.path.join(TEST_DATA, 'lembang_mmi_hazmap.asc')
    uploaded = save_to_geonode(thefile, user=self.user)
    check_layer(uploaded)

    name = uploaded.name
    uuid = uploaded.uuid
    pk = uploaded.pk

    # cleanup() is meant for orphaned GeoServer layers only, so calling
    # it while the Django record still exists must raise.
    # FIX: the original caught GeoNodeException with 'pass', which made
    # the test vacuous - it could never fail either way.
    self.assertRaises(GeoNodeException, cleanup, name, uuid)
def test_keywords(self):
    """Check that keywords are read from the .keywords file
    """
    thefile = os.path.join(TEST_DATA, 'Lembang_Earthquake_Scenario.asc')
    uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)

    keywords = uploaded.keywords

    msg = 'No keywords found in layer %s' % uploaded.name
    assert len(keywords) > 0, msg

    # Read the expected keywords from the sidecar .keywords file
    keywords_file = thefile.replace('.asc', '.keywords')
    f = open(keywords_file, 'r')
    keywords_list = []
    for line in f.readlines():
        keywords_list.append(line.strip())
    f.close()

    # Every keyword from the file must be present on the uploaded layer.
    # FIX: the original asserted 'keyword in keywords_list', i.e. tested
    # the file's list against itself, so the check could never fail.
    for keyword in keywords_list:
        msg = 'Could not find keyword "%s" in %s' % (keyword, keywords)
        assert keyword in keywords, msg
def test_geotransform_from_geonode(self):
    """Geotransforms of GeoNode layers can be correctly determined
    """
    for filename in ['lembang_mmi_hazmap.asc',
                     'test_grid.asc',
                     'shakemap_padang_20090930.asc',
                     'Population_2010_clip.tif']:

        # Upload file to GeoNode
        f = os.path.join(TEST_DATA, filename)
        layer = save_to_geonode(f, user=self.user)
        name = '%s:%s' % (layer.workspace, layer.name)

        # Read raster file and obtain reference resolution
        R = read_layer(f)
        ref_geotransform = R.get_geotransform()

        # ARIEL: geotransform is a vector of six numbers:
        #
        #  (top left x, w-e pixel resolution, rotation,
        #   top left y, rotation, n-s pixel resolution).
        #
        # We should (at least) use elements 0, 1, 3, 5
        # to uniquely determine if rasters are aligned
        # - This depends on what you can get from geonode

        # Get geotransform from GeoNode
        layer_name = layer.name
        metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

        geotransform_name = 'geotransform'
        msg = ('Could not find attribute "%s" in metadata. '
               'Values are: %s' % (geotransform_name, metadata.keys()))
        assert geotransform_name in metadata, msg

        # FIX: the original read metadata['geo_transform'], which would
        # raise KeyError after asserting the 'geotransform' key exists.
        # NOTE(review): if the metadata really uses 'geo_transform',
        # update geotransform_name instead - the two must agree.
        gn_geotransform = metadata[geotransform_name]
        msg = ('Geotransform obtained from GeoNode for layer %s '
               'was not correct. I got %s but expected %s'
               '' % (name, gn_geotransform, ref_geotransform))
        assert numpy.allclose(ref_geotransform, gn_geotransform), msg
def test_another_asc(self):
    """Real world ASCII file can be uploaded
    """
    path = os.path.join(TEST_DATA, 'lembang_mmi_hazmap.asc')
    # Upload and run the standard integrity checks
    check_layer(save_to_geonode(path, user=self.user))
def test_asc(self):
    """ASCII file can be uploaded
    """
    path = os.path.join(TEST_DATA, 'test_grid.asc')
    # Upload (replacing any previous copy) and verify the layer
    layer = save_to_geonode(path, user=self.user, overwrite=True)
    check_layer(layer)
def test_shapefile(self):
    """Shapefile can be uploaded
    """
    path = os.path.join(TEST_DATA, 'lembang_schools.shp')
    # Upload (replacing any previous copy) and verify the layer
    layer = save_to_geonode(path, user=self.user, overwrite=True)
    check_layer(layer)
def test_tiff(self):
    """GeoTIF file can be uploaded
    """
    path = os.path.join(TEST_DATA, 'Population_2010_clip.tif')
    # Upload and run the standard integrity checks
    check_layer(save_to_geonode(path, user=self.user))
def test_layer_upload(self):
    """Layers can be uploaded to local GeoNode
    """
    expected_layers = []
    not_expected_layers = []
    datadir = TEST_DATA
    BAD_LAYERS = ['grid_without_projection.asc']

    # Collect the layer names the bulk upload is expected to create
    for root, dirs, files in os.walk(datadir):
        for filename in files:
            basename, extension = os.path.splitext(filename)
            if extension.lower() not in LAYER_TYPES:
                continue
            # FIXME(Ole): GeoNode converts names to lower case
            name = unicode(basename.lower())
            if filename in BAD_LAYERS:
                not_expected_layers.append(name)
            else:
                expected_layers.append(name)

    # Upload the whole directory in one go
    layers = save_to_geonode(datadir, user=self.user, overwrite=True)

    # Check integrity: nothing unexpected came back ...
    layer_names = [l.name for l in layers]
    for layer in layers:
        msg = 'Layer %s was uploaded but not expected' % layer.name
        assert layer.name in expected_layers, msg

    # ... and everything expected is present everywhere
    for layer_name in expected_layers:
        msg = ('The following layer should have been uploaded '
               'but was not: %s' % layer_name)
        assert layer_name in layer_names, msg

        # Check the layer is in the Django database
        Layer.objects.get(name=layer_name)

        # Check that layer is in geoserver
        found = False
        gs_username, gs_password = settings.GEOSERVER_CREDENTIALS
        page = get_web_page(os.path.join(settings.GEOSERVER_BASE_URL,
                                         'rest/layers'),
                            username=gs_username,
                            password=gs_password)
        for line in page:
            if line.find('rest/layers/%s.html' % layer_name) > 0:
                found = True
        if not found:
            msg = ('Upload could not be verified, the layer %s is not '
                   'in geoserver %s, but GeoNode did not raise any errors, '
                   'this should never happen.'
                   % (layer_name, settings.GEOSERVER_BASE_URL))
            raise GeoNodeException(msg)

    server_url = settings.GEOSERVER_BASE_URL + 'ows?'

    # Verify that the GeoServer GetCapabilities record is accesible:
    metadata = get_layers_metadata(server_url, '1.0.0')
    msg = ('The metadata list should not be empty in server %s'
           % server_url)
    assert len(metadata) > 0, msg
def test_lembang_building_examples(self):
    """Lembang building impact calculation works through the API
    """
    # Test for a range of hazard layers
    for mmi_filename in ['lembang_mmi_hazmap.asc']:
                         #'Lembang_Earthquake_Scenario.asc']:

        # Upload input data
        hazardfile = os.path.join(TEST_DATA, mmi_filename)
        hazard_layer = save_to_geonode(hazardfile, user=self.user)
        hazard_name = '%s:%s' % (hazard_layer.workspace,
                                 hazard_layer.name)

        exposurefile = os.path.join(TEST_DATA, 'lembang_schools.shp')
        exposure_layer = save_to_geonode(exposurefile, user=self.user)
        exposure_name = '%s:%s' % (exposure_layer.workspace,
                                   exposure_layer.name)

        # Call calculation routine

        # FIXME (Ole): The system freaks out if there are spaces in
        #              bbox string. Please let us catch that and deal
        #              nicely with it - also do this in download()
        bbox = '105.592,-7.809,110.159,-5.647'

        #print
        #print get_bounding_box(hazardfile)
        #print get_bounding_box(exposurefile)

        with warnings.catch_warnings():
            warnings.simplefilter('ignore')

            client = Client()
            rv = client.post('/api/v1/calculate/', data=dict(
                    hazard_server=INTERNAL_SERVER_URL,
                    hazard=hazard_name,
                    exposure_server=INTERNAL_SERVER_URL,
                    exposure=exposure_name,
                    bbox=bbox,
                    impact_function='Earthquake School Damage Function',
                    impact_level=10,
                    keywords='test,schools,lembang',
                    ))

        # The API must answer with a JSON run description
        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)
        for key in ['hazard_layer', 'exposure_layer',
                    'run_duration', 'run_date', 'layer']:
            assert key in data

        # Download result and check
        layer_name = data['layer'].split('/')[-1]

        result_layer = download(INTERNAL_SERVER_URL, layer_name, bbox)
        assert os.path.exists(result_layer.filename)

        # Read hazard data for reference
        hazard_raster = read_layer(hazardfile)
        A = hazard_raster.get_data()
        mmi_min, mmi_max = hazard_raster.get_extrema()

        # Read calculated result
        impact_vector = read_layer(result_layer.filename)
        coordinates = impact_vector.get_geometry()
        attributes = impact_vector.get_data()

        # Verify calculated result point by point
        count = 0
        for i, attribute in enumerate(attributes):
            lon, lat = coordinates[i][:]
            calculated_mmi = attribute['MMI']

            if calculated_mmi == 0.0:
                # FIXME (Ole): Some points have MMI==0 here.
                # Weird but not a show stopper
                continue

            # Check that interpolated points are within range
            msg = ('Interpolated mmi %f was outside extrema: '
                   '[%f, %f] at location '
                   '[%f, %f]. ' % (calculated_mmi,
                                   mmi_min, mmi_max,
                                   lon, lat))
            assert mmi_min <= calculated_mmi <= mmi_max, msg

            # Check calculated damage
            calculated_dam = attribute['Percent_da']
            ref_dam = lembang_damage_function(calculated_mmi)
            msg = ('Calculated damage was not as expected '
                   'for hazard layer %s' % hazardfile)
            assert numpy.allclose(calculated_dam, ref_dam,
                                  rtol=1.0e-12), msg

            count += 1

        # Make sure only a few points were 0
        assert count > len(attributes) - 4