def test_io(self):
    """Data can be uploaded and downloaded from internal GeoServer
    """

    # Upload a raster and a vector data set
    for filename in ['population_padang_1.asc', 'lembang_schools.shp']:
        basename, ext = os.path.splitext(filename)

        filename = os.path.join(TESTDATA, filename)
        layer = save_to_geonode(filename, user=self.user, overwrite=True)

        # Name checking
        layer_name = layer.name
        expected_name = basename.lower()
        msg = 'Expected layername %s but got %s' % (expected_name,
                                                    layer_name)
        assert layer_name == expected_name, msg

        workspace = layer.workspace
        msg = 'Expected workspace to be "geonode". Got %s' % workspace
        assert workspace == 'geonode', msg

        # Check metadata
        assert_bounding_box_matches(layer, filename)

        # Download layer again using workspace:name
        bbox = get_bounding_box(filename)
        downloaded_layer = download(INTERNAL_SERVER_URL,
                                    '%s:%s' % (workspace, layer_name),
                                    bbox)
        assert os.path.exists(downloaded_layer.filename)
def XXtest_shakemap_population_exposure(self):
    """Population exposed to groundshaking matches USGS numbers
    """

    hazardfile = os.path.join(TEST_DATA, 'shakemap_sumatra_20110129.tif')
    hazard_layer = save_to_geonode(hazardfile, overwrite=True,
                                   user=self.user)
    hazard_name = '%s:%s' % (hazard_layer.workspace, hazard_layer.name)

    exposurefile = os.path.join(TEST_DATA, 'population_indonesia_2008.tif')
    exposure_layer = save_to_geonode(exposurefile, overwrite=True,
                                     user=self.user)
    exposure_name = '%s:%s' % (exposure_layer.workspace,
                               exposure_layer.name)

    #with warnings.catch_warnings():
    #    warnings.simplefilter('ignore')
    c = Client()
    rv = c.post('/api/v1/calculate/', data=dict(
            hazard_server=INTERNAL_SERVER_URL,
            hazard=hazard_name,
            exposure_server=INTERNAL_SERVER_URL,
            exposure=exposure_name,
            bbox=get_bounding_box_string(hazardfile),
            impact_function='USGSFatalityFunction',
            impact_level=10,
            keywords='test,shakemap,usgs',
            ))

    self.assertEqual(rv.status_code, 200)
    self.assertEqual(rv['Content-Type'], 'application/json')
    data = json.loads(rv.content)
    assert 'hazard_layer' in data.keys()
    assert 'exposure_layer' in data.keys()
    assert 'run_duration' in data.keys()
    assert 'run_date' in data.keys()
    assert 'layer' in data.keys()

    # Download result and check
    layer_name = data['layer'].split('/')[-1]

    result_layer = download(INTERNAL_SERVER_URL,
                            layer_name,
                            get_bounding_box(hazardfile))
    assert os.path.exists(result_layer.filename)

    # Read hazard data for reference
    hazard_raster = read_layer(hazardfile)
    H = hazard_raster.get_data()
    mmi_min, mmi_max = hazard_raster.get_extrema()

    # Read calculated result
    impact_raster = read_layer(result_layer.filename)
    I = impact_raster.get_data()
def test_io(self):
    """Data can be uploaded and downloaded from internal GeoServer
    """

    # Upload a raster and a vector data set
    for filename in ['lembang_mmi_hazmap.asc', 'lembang_schools.shp']:
        basename, ext = os.path.splitext(filename)

        filename = os.path.join(TEST_DATA, filename)
        layer = save_to_geonode(filename, user=self.user)

        # Name checking
        layer_name = layer.name
        workspace = layer.workspace

        msg = 'Expected workspace to be "geonode". Got %s' % workspace
        assert workspace == 'geonode', msg

        msg = 'Expected layer name to be "%s". Got %s' % (basename.lower(),
                                                          layer_name)
        assert layer_name == basename.lower(), msg

        # Check metadata
        assert isinstance(layer.geographic_bounding_box, basestring)

        # Extract bounding box from layer handle
        s = 'POLYGON(('
        i = layer.geographic_bounding_box.find(s) + len(s)
        assert i > len(s)

        j = layer.geographic_bounding_box.find('))')
        assert j > i

        bbox_string = str(layer.geographic_bounding_box[i:j])
        A = numpy.array([[float(x[0]), float(x[1])] for x in
                         (p.split() for p in bbox_string.split(','))])
        south = min(A[:, 1])
        north = max(A[:, 1])
        west = min(A[:, 0])
        east = max(A[:, 0])
        bbox = [west, south, east, north]

        # Check correctness of bounding box against reference
        ref_bbox = get_bounding_box(filename)

        msg = ('Bounding box from layer handle "%s" was not as expected.\n'
               'Got %s, expected %s' % (layer_name, bbox, ref_bbox))
        assert numpy.allclose(bbox, ref_bbox), msg

        # Download layer again using workspace:name
        downloaded_layer = download(INTERNAL_SERVER_URL, layer_name, bbox)
        assert os.path.exists(downloaded_layer.filename)
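# The inline POLYGON((...)) parsing above could be factored into a reusable
# helper. A minimal sketch, assuming only that geographic_bounding_box is a
# WKT-like string as exercised by the test; the helper name is hypothetical
# and not part of this codebase:
def bbox_from_layer_handle(layer):
    """Return [west, south, east, north] parsed from a layer handle's
    geographic_bounding_box string.
    """
    wkt = layer.geographic_bounding_box
    i = wkt.find('POLYGON((') + len('POLYGON((')
    j = wkt.find('))')
    A = numpy.array([[float(p.split()[0]), float(p.split()[1])]
                     for p in wkt[i:j].split(',')])
    return [min(A[:, 0]), min(A[:, 1]), max(A[:, 0]), max(A[:, 1])]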
def XXtest_shakemap_population_exposure(self):
    """Population exposed to groundshaking matches USGS numbers
    """

    hazardfile = os.path.join(TESTDATA, 'shakemap_sumatra_20110129.tif')
    hazard_layer = save_to_geonode(hazardfile, overwrite=True,
                                   user=self.user)
    hazard_name = '%s:%s' % (hazard_layer.workspace, hazard_layer.name)

    exposurefile = os.path.join(TESTDATA, 'population_indonesia_2008.tif')
    exposure_layer = save_to_geonode(exposurefile, overwrite=True,
                                     user=self.user)
    exposure_name = '%s:%s' % (exposure_layer.workspace,
                               exposure_layer.name)

    #with warnings.catch_warnings():
    #    warnings.simplefilter('ignore')
    c = Client()
    rv = c.post('/impact/api/calculate/', data=dict(
            hazard_server=INTERNAL_SERVER_URL,
            hazard=hazard_name,
            exposure_server=INTERNAL_SERVER_URL,
            exposure=exposure_name,
            bbox=get_bounding_box_string(hazardfile),
            impact_function='USGSFatalityFunction',
            keywords='test,shakemap,usgs'))

    self.assertEqual(rv.status_code, 200)
    self.assertEqual(rv['Content-Type'], 'application/json')
    data = json.loads(rv.content)
    assert 'hazard_layer' in data.keys()
    assert 'exposure_layer' in data.keys()
    assert 'run_duration' in data.keys()
    assert 'run_date' in data.keys()
    assert 'layer' in data.keys()

    # Download result and check
    layer_name = data['layer'].split('/')[-1]

    result_layer = download(INTERNAL_SERVER_URL,
                            layer_name,
                            get_bounding_box(hazardfile))
    assert os.path.exists(result_layer.filename)

    # Read hazard data for reference
    hazard_raster = read_layer(hazardfile)
    H = hazard_raster.get_data()
    mmi_min, mmi_max = hazard_raster.get_extrema()

    # Read calculated result
    impact_raster = read_layer(result_layer.filename)
    I = impact_raster.get_data()
def test_native_raster_resolution(self):
    """Raster layer retains native resolution through Geoserver

    Raster layer can be uploaded and downloaded again with
    native resolution. This is one test for ticket #103
    """

    hazard_filename = ('%s/maumere_aos_depth_20m_land_wgs84.asc'
                       % TESTDATA)

    # Get reference values
    H = read_layer(hazard_filename)
    A_ref = H.get_data(nan=True)
    depth_min_ref, depth_max_ref = H.get_extrema()

    # Upload to internal geonode
    hazard_layer = save_to_geonode(hazard_filename, user=self.user)
    hazard_name = '%s:%s' % (hazard_layer.workspace, hazard_layer.name)

    # Download data again with native resolution
    bbox = get_bounding_box_string(hazard_filename)
    H = download(INTERNAL_SERVER_URL, hazard_name, bbox)
    A = H.get_data(nan=True)

    # Compare shapes
    msg = ('Shape of downloaded raster was [%i, %i]. '
           'Expected [%i, %i].' % (A.shape[0], A.shape[1],
                                   A_ref.shape[0], A_ref.shape[1]))
    assert numpy.allclose(A_ref.shape, A.shape, rtol=0, atol=0), msg

    # Compare extrema to reference values (which have also been
    # verified by QGIS for this layer and tested in test_engine.py)
    depth_min, depth_max = H.get_extrema()
    msg = ('Extrema of downloaded file were [%f, %f] but '
           'expected [%f, %f]' % (depth_min, depth_max,
                                  depth_min_ref, depth_max_ref))
    assert numpy.allclose([depth_min, depth_max],
                          [depth_min_ref, depth_max_ref],
                          rtol=1.0e-6, atol=1.0e-10), msg

    # Compare data number by number
    assert nanallclose(A, A_ref, rtol=1.0e-8)
def Xtest_raster_upload(self):
    """Raster layer can be uploaded and downloaded again correctly
    """

    hazard_filename = ('%s/maumere_aos_depth_20m_land_wgs84.asc'
                       % TESTDATA)

    # Get reference values
    H = read_layer(hazard_filename)
    A_ref = H.get_data()
    depth_min_ref, depth_max_ref = H.get_extrema()

    # Upload to internal geonode
    hazard_layer = save_to_geonode(hazard_filename, user=self.user)
    hazard_name = '%s:%s' % (hazard_layer.workspace, hazard_layer.name)

    # Download data again
    bbox = get_bounding_box_string(hazard_filename)
    H = download(INTERNAL_SERVER_URL, hazard_name, bbox)
    A = H.get_data()

    # Compare shapes
    msg = ('Shape of downloaded raster was [%i, %i]. '
           'Expected [%i, %i].' % (A.shape[0], A.shape[1],
                                   A_ref.shape[0], A_ref.shape[1]))
    assert numpy.allclose(A_ref.shape, A.shape, rtol=0, atol=0), msg

    # Compare extrema to reference values (which have also been
    # verified by QGIS for this layer and tested in test_engine.py)
    depth_min, depth_max = H.get_extrema()
    msg = ('Extrema of downloaded file were [%f, %f] but '
           'expected [%f, %f]' % (depth_min, depth_max,
                                  depth_min_ref, depth_max_ref))
    assert numpy.allclose([depth_min, depth_max],
                          [depth_min_ref, depth_max_ref],
                          rtol=1.0e-6, atol=1.0e-10), msg
def test_data_resampling_example(self):
    """Raster data is unchanged when going through geonode
    """

    # Define file names for hazard level, exposure and expected fatalities
    hazard_filename = ('%s/maumere_aos_depth_20m_land_wgs84.asc'
                       % TESTDATA)
    exposure_filename = ('%s/maumere_pop_prj.shp' % TESTDATA)

    #------------
    # Hazard data
    #------------
    # Read hazard input data for reference
    H_ref = read_layer(hazard_filename)

    A_ref = H_ref.get_data()
    depth_min_ref, depth_max_ref = H_ref.get_extrema()

    # Upload to internal geonode
    hazard_layer = save_to_geonode(hazard_filename, user=self.user)
    hazard_name = '%s:%s' % (hazard_layer.workspace, hazard_layer.name)

    # Download data again
    bbox = get_bounding_box_string(hazard_filename)  # The biggest
    H = download(INTERNAL_SERVER_URL, hazard_name, bbox)

    A = H.get_data()
    depth_min, depth_max = H.get_extrema()

    # FIXME (Ole): The layer read from file is single precision only:
    # Issue #17
    # Here's the explanation why interpolation below produces slightly
    # different results (but why?)
    # The layer read from file is single precision which may be due to
    # the way it is converted from ASC to TIF. In other words the
    # problem may be in raster.write_to_file. Float64 is
    # specified there, so this is a mystery.
    #print 'A', A.dtype          # Double precision
    #print 'A_ref', A_ref.dtype  # Single precision

    # Compare extrema to values from numpy array
    assert numpy.allclose(depth_max, numpy.nanmax(A),
                          rtol=1.0e-12, atol=1.0e-12)
    assert numpy.allclose(depth_max_ref, numpy.nanmax(A_ref),
                          rtol=1.0e-12, atol=1.0e-12)

    # Compare to reference
    assert numpy.allclose([depth_min, depth_max],
                          [depth_min_ref, depth_max_ref],
                          rtol=1.0e-12, atol=1.0e-12)

    # Compare extrema to values read off QGIS for this layer
    assert numpy.allclose([depth_min, depth_max], [0.0, 16.68],
                          rtol=1.0e-6, atol=1.0e-10)

    # Investigate difference visually
    #from matplotlib.pyplot import matshow, show
    #matshow(A)
    #matshow(A_ref)
    #matshow(A - A_ref)
    #show()

    #print
    for i in range(A.shape[0]):
        for j in range(A.shape[1]):
            if not numpy.isnan(A[i, j]):
                err = abs(A[i, j] - A_ref[i, j])
                if err > 0:
                    msg = ('%i, %i: %.15f, %.15f, %.15f'
                           % (i, j, A[i, j], A_ref[i, j], err))
                    raise Exception(msg)
            #if A[i, j] > 16:
            #    print i, j, A[i, j], A_ref[i, j]

    # Compare elements (nan & numbers)
    id_nan = numpy.isnan(A)
    id_nan_ref = numpy.isnan(A_ref)
    assert numpy.all(id_nan == id_nan_ref)
    assert numpy.allclose(A[-id_nan], A_ref[-id_nan],
                          rtol=1.0e-15, atol=1.0e-15)

    #print 'MAX', A[245, 283], A_ref[245, 283]
    #print 'MAX: %.15f %.15f %.15f' % (A[245, 283], A_ref[245, 283])
    assert numpy.allclose(A[245, 283], A_ref[245, 283],
                          rtol=1.0e-15, atol=1.0e-15)

    #--------------
    # Exposure data
    #--------------
    # Read exposure input data for reference
    E_ref = read_layer(exposure_filename)

    # Upload to internal geonode
    exposure_layer = save_to_geonode(exposure_filename, user=self.user)
    exposure_name = '%s:%s' % (exposure_layer.workspace,
                               exposure_layer.name)

    # Download data again
    E = download(INTERNAL_SERVER_URL, exposure_name, bbox)

    # Check exposure data against reference
    coordinates = E.get_geometry()
    coordinates_ref = E_ref.get_geometry()
    assert numpy.allclose(coordinates, coordinates_ref,
                          rtol=1.0e-12, atol=1.0e-12)

    attributes = E.get_data()
    attributes_ref = E_ref.get_data()
    for i, att in enumerate(attributes):
        att_ref = attributes_ref[i]
        for key in att:
            assert att[key] == att_ref[key]

    # Test riab's interpolation function
    I = H.interpolate(E, name='depth')
    icoordinates = I.get_geometry()

    I_ref = H_ref.interpolate(E_ref, name='depth')
    icoordinates_ref = I_ref.get_geometry()

    assert numpy.allclose(coordinates,
                          icoordinates,
                          rtol=1.0e-12, atol=1.0e-12)
    assert numpy.allclose(coordinates,
                          icoordinates_ref,
                          rtol=1.0e-12, atol=1.0e-12)

    iattributes = I.get_data()
    assert numpy.allclose(icoordinates, coordinates)

    N = len(icoordinates)
    assert N == 891

    # Set tolerance for single precision until issue #17 has been fixed
    # It appears that the single precision leads to larger interpolation
    # errors
    rtol_issue17 = 2.0e-3
    atol_issue17 = 1.0e-4

    # Verify interpolated values with test result
    for i in range(N):
        interpolated_depth_ref = I_ref.get_data()[i]['depth']
        interpolated_depth = iattributes[i]['depth']

        assert nanallclose(interpolated_depth,
                           interpolated_depth_ref,
                           rtol=rtol_issue17, atol=atol_issue17)

        pointid = attributes[i]['POINTID']

        if pointid == 263:
            #print i, pointid, attributes[i],
            #print interpolated_depth, coordinates[i]

            # Check that location is correct
            assert numpy.allclose(coordinates[i],
                                  [122.20367299, -8.61300358],
                                  rtol=1.0e-7, atol=1.0e-12)

            # This is known to be outside inundation area so depth
            # should be near zero
            assert numpy.allclose(interpolated_depth, 0.0,
                                  rtol=1.0e-12, atol=1.0e-12)

        if pointid == 148:
            # Check that location is correct
            #print coordinates[i]
            assert numpy.allclose(coordinates[i],
                                  [122.2045912, -8.608483265],
                                  rtol=1.0e-7, atol=1.0e-12)

            # This is in an inundated area with surrounding depths of
            # 4.531, 3.911
            # 2.675, 2.583
            assert interpolated_depth < 4.531
            assert interpolated_depth < 3.911
            assert interpolated_depth > 2.583
            assert interpolated_depth > 2.675

            #print interpolated_depth
            # This is a characterisation test for bilinear interpolation
            assert numpy.allclose(interpolated_depth, 3.62477215491,
                                  rtol=rtol_issue17, atol=1.0e-12)

        # Check that interpolated points are within range
        msg = ('Interpolated depth %f at point %i was outside extrema: '
               '[%f, %f]. ' % (interpolated_depth, i,
                               depth_min, depth_max))

        if not numpy.isnan(interpolated_depth):
            assert depth_min <= interpolated_depth <= depth_max, msg
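# For reference, a minimal standalone sketch of bilinear interpolation, the
# scheme the characterisation value 3.62477215491 above exercises. This is
# the textbook formula, not necessarily the exact code path inside
# H.interpolate:
def bilinear(q11, q21, q12, q22, fx, fy):
    """Interpolate between four corner values q11 (top-left), q21
    (top-right), q12 (bottom-left) and q22 (bottom-right), where fx and
    fy are fractional offsets in [0, 1] within the grid cell.
    """
    top = q11 * (1 - fx) + q21 * fx
    bottom = q12 * (1 - fx) + q22 * fx
    return top * (1 - fy) + bottom * fy

# With the surrounding depths 4.531, 3.911, 2.675 and 2.583 from the test
# above, any fx, fy in [0, 1] yields a value within [2.583, 4.531], which
# is what the four inequality assertions check.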
def test_lembang_building_examples(self):
    """Lembang building impact calculation works through the API
    """

    # Test for a range of hazard layers
    for mmi_filename in ['lembang_mmi_hazmap.asc']:
                         #'Lembang_Earthquake_Scenario.asc']:

        # Upload input data
        hazardfile = os.path.join(TESTDATA, mmi_filename)
        hazard_layer = save_to_geonode(hazardfile, user=self.user)
        hazard_name = '%s:%s' % (hazard_layer.workspace,
                                 hazard_layer.name)

        exposurefile = os.path.join(TESTDATA, 'lembang_schools.shp')
        exposure_layer = save_to_geonode(exposurefile, user=self.user)
        exposure_name = '%s:%s' % (exposure_layer.workspace,
                                   exposure_layer.name)

        # Call calculation routine

        # FIXME (Ole): The system freaks out if there are spaces in
        #              bbox string. Please let us catch that and deal
        #              nicely with it - also do this in download()
        bbox = '105.592,-7.809,110.159,-5.647'

        #print
        #print get_bounding_box(hazardfile)
        #print get_bounding_box(exposurefile)

        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            c = Client()
            rv = c.post('/impact/api/calculate/', data=dict(
                    hazard_server=INTERNAL_SERVER_URL,
                    hazard=hazard_name,
                    exposure_server=INTERNAL_SERVER_URL,
                    exposure=exposure_name,
                    bbox=bbox,
                    impact_function='Earthquake Building Damage Function',
                    keywords='test,schools,lembang',
                    ))

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)
        assert 'hazard_layer' in data.keys()
        assert 'exposure_layer' in data.keys()
        assert 'run_duration' in data.keys()
        assert 'run_date' in data.keys()
        assert 'layer' in data.keys()

        # Download result and check
        layer_name = data['layer'].split('/')[-1]

        result_layer = download(INTERNAL_SERVER_URL,
                                layer_name,
                                bbox)
        assert os.path.exists(result_layer.filename)

        # Read hazard data for reference
        hazard_raster = read_layer(hazardfile)
        A = hazard_raster.get_data()
        mmi_min, mmi_max = hazard_raster.get_extrema()

        # Read calculated result
        impact_vector = read_layer(result_layer.filename)
        coordinates = impact_vector.get_geometry()
        attributes = impact_vector.get_data()

        # Verify calculated result
        count = 0
        for i in range(len(attributes)):
            lon, lat = coordinates[i][:]
            calculated_mmi = attributes[i]['MMI']

            if calculated_mmi == 0.0:
                # FIXME (Ole): Some points have MMI==0 here.
                # Weird but not a show stopper
                continue

            # Check that interpolated points are within range
            msg = ('Interpolated mmi %f was outside extrema: '
                   '[%f, %f] at location '
                   '[%f, %f]. ' % (calculated_mmi,
                                   mmi_min, mmi_max,
                                   lon, lat))
            assert mmi_min <= calculated_mmi <= mmi_max, msg

            # Check calculated damage
            calculated_dam = attributes[i]['DAMAGE']
            ref_dam = lembang_damage_function(calculated_mmi)
            msg = ('Calculated damage was not as expected '
                   'for hazard layer %s' % hazardfile)
            assert numpy.allclose(calculated_dam, ref_dam,
                                  rtol=1.0e-12), msg

            count += 1

        # Make sure only a few points were 0
        assert count > len(attributes) - 4
def test_padang_building_examples(self):
    """Padang building impact calculation works through the API
    """

    # Test for a range of hazard layers
    for mmi_filename in ['Shakemap_Padang_2009.asc']:
                         #'Lembang_Earthquake_Scenario.asc']:

        # Upload input data
        hazardfile = os.path.join(TESTDATA, mmi_filename)
        hazard_layer = save_to_geonode(hazardfile, user=self.user)
        hazard_name = '%s:%s' % (hazard_layer.workspace,
                                 hazard_layer.name)

        exposurefile = os.path.join(TESTDATA, 'Padang_WGS84.shp')
        exposure_layer = save_to_geonode(exposurefile, user=self.user)
        exposure_name = '%s:%s' % (exposure_layer.workspace,
                                   exposure_layer.name)

        # Call calculation routine

        # FIXME (Ole): The system freaks out if there are spaces in
        #              bbox string. Please let us catch that and deal
        #              nicely with it - also do this in download()
        bbox = '96.956, -5.51, 104.63933, 2.289497'

        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            c = Client()
            rv = c.post('/impact/api/calculate/', data=dict(
                    hazard_server=INTERNAL_SERVER_URL,
                    hazard=hazard_name,
                    exposure_server=INTERNAL_SERVER_URL,
                    exposure=exposure_name,
                    bbox=bbox,
                    impact_function='Padang Earthquake '
                                    'Building Damage Function',
                    keywords='test,buildings,padang',
                    ))

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)
        assert 'hazard_layer' in data.keys()
        assert 'exposure_layer' in data.keys()
        assert 'run_duration' in data.keys()
        assert 'run_date' in data.keys()
        assert 'layer' in data.keys()

        # Download result and check
        layer_name = data['layer'].split('/')[-1]

        result_layer = download(INTERNAL_SERVER_URL,
                                layer_name,
                                bbox)
        assert os.path.exists(result_layer.filename)

        # Read hazard data for reference
        hazard_raster = read_layer(hazardfile)
        A = hazard_raster.get_data()
        mmi_min, mmi_max = hazard_raster.get_extrema()

        # Read calculated result
        impact_vector = read_layer(result_layer.filename)
        coordinates = impact_vector.get_geometry()
        attributes = impact_vector.get_data()

        # Verify calculated result
        count = 0
        verified_count = 0
        for i in range(len(attributes)):
            lon, lat = coordinates[i][:]
            calculated_mmi = attributes[i]['MMI']

            if calculated_mmi == 0.0:
                # FIXME (Ole): Some points have MMI==0 here.
                # Weird but not a show stopper
                continue

            # Check that interpolated points are within range
            msg = ('Interpolated mmi %f was outside extrema: '
                   '[%f, %f] at location '
                   '[%f, %f]. ' % (calculated_mmi,
                                   mmi_min, mmi_max,
                                   lon, lat))
            assert mmi_min <= calculated_mmi <= mmi_max, msg

            building_class = attributes[i]['TestBLDGCl']

            # Check calculated damage
            calculated_dam = attributes[i]['DAMAGE']
            verified_dam = padang_check_results(calculated_mmi,
                                                building_class)
            #print calculated_mmi, building_class, calculated_dam
            if verified_dam:
                msg = ('Calculated damage was not as expected '
                       'for hazard layer %s. I got %f '
                       'but expected %f' % (hazardfile,
                                            calculated_dam,
                                            verified_dam))
                assert numpy.allclose(calculated_dam, verified_dam,
                                      rtol=1.0e-4), msg
                verified_count += 1
            count += 1

        msg = ('No points were verified in output. Please create '
               'a table with reference data')
        assert verified_count > 0, msg

        msg = 'Number of buildings was not 3896.'
        assert count == 3896, msg
def calculate(request, save_output=dummy_save):
    start = datetime.datetime.now()

    if request.method == 'GET':
        # FIXME: Add a basic form here to be able to generate the POST
        # request.
        return HttpResponse('This should be accessed by robots, not humans. '
                            'In other words using HTTP POST instead of GET.')
    elif request.method == 'POST':
        data = request.POST
        impact_function_name = data['impact_function']
        hazard_server = data['hazard_server']
        hazard_layer = data['hazard']
        exposure_server = data['exposure_server']
        exposure_layer = data['exposure']
        bbox = data['bbox']
        keywords = data['keywords']

        theuser = get_guaranteed_valid_user(request.user)

        plugin_list = get_plugins(impact_function_name)
        _, impact_function = plugin_list[0].items()[0]
        impact_function_source = inspect.getsource(impact_function)

        # Create entry in database
        calculation = Calculation(user=theuser,
                                  run_date=start,
                                  hazard_server=hazard_server,
                                  hazard_layer=hazard_layer,
                                  exposure_server=exposure_server,
                                  exposure_layer=exposure_layer,
                                  impact_function=impact_function_name,
                                  impact_function_source=impact_function_source,
                                  bbox=bbox,
                                  success=False)
        calculation.save()

        logger.info('Performing requested calculation')

        # Download selected layer objects
        logger.info('- Downloading hazard layer %s from %s'
                    % (hazard_layer, hazard_server))
        H = download(hazard_server, hazard_layer, bbox)

        logger.info('- Downloading exposure layer %s from %s'
                    % (exposure_layer, exposure_server))
        E = download(exposure_server, exposure_layer, bbox)

        # Calculate result using specified impact function
        logger.info('- Calculating impact using %s' % impact_function)
        impact_filename = calculate_impact(layers=[H, E],
                                           impact_function=impact_function)

        # Upload result to internal GeoServer
        logger.info('- Uploading impact layer %s' % impact_filename)
        result = save_output(impact_filename,
                             title='output_%s' % start.isoformat(),
                             user=theuser)
        logger.info('- Result available at %s.'
                    % result.get_absolute_url())

        calculation.layer = result.get_absolute_url()
        calculation.success = True
        calculation.save()

        output = calculation.__dict__

        # json.dumps does not like datetime objects,
        # let's make it a json string ourselves
        output['run_date'] = 'new Date("%s")' % calculation.run_date

        # FIXME: This should not be needed in an ideal world
        ows_server_url = settings.GEOSERVER_BASE_URL + 'ows'
        output['ows_server_url'] = ows_server_url

        # json.dumps does not like django users
        output['user'] = calculation.user.username

        # Delete _state and _user_cache items from the dict;
        # they were created automatically by Django
        del output['_user_cache']
        del output['_state']

        jsondata = json.dumps(output)
        return HttpResponse(jsondata, mimetype='application/json')
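# The save_output parameter is a dependency-injection hook: tests can pass a
# stub instead of uploading results to GeoServer. A minimal sketch of such a
# stub, assuming only that calculate() calls result.get_absolute_url(); the
# real dummy_save in this codebase may look different:
class FakeLayer:
    """Stand-in for a GeoNode layer handle."""
    def get_absolute_url(self):
        return '/data/fake_output_layer'

def dummy_save(filename, title=None, user=None):
    """Skip the GeoServer upload and return a fake layer handle."""
    return FakeLayer()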
def test_raster_scaling(self):
    """Raster layers can be scaled when resampled

    This is a test for ticket #168

    Native test .asc data has
    ncols 5525
    nrows 2050
    cellsize 0.0083333333333333

    Scaling is necessary for raster data that represents density
    such as population per km^2
    """

    for test_filename in ['Population_Jakarta_geographic.asc',
                          'Population_2010.asc']:

        raster_filename = ('%s/%s' % (TESTDATA, test_filename))

        # Get reference values
        R = read_layer(raster_filename)
        R_min_ref, R_max_ref = R.get_extrema()
        native_resolution = R.get_resolution()

        # Upload to internal geonode
        raster_layer = save_to_geonode(raster_filename, user=self.user)
        raster_name = '%s:%s' % (raster_layer.workspace,
                                 raster_layer.name)

        # Test for a range of resolutions
        for res in [0.02, 0.01, 0.005, 0.002, 0.001, 0.0005,  # Coarser
                    0.0002]:                                  # Finer

            # To save time don't do finest resolution for the
            # large population set
            if test_filename.startswith('Population_2010') and res < 0.005:
                break

            bbox = get_bounding_box_string(raster_filename)

            R = download(INTERNAL_SERVER_URL, raster_name,
                         bbox, resolution=res)
            A_native = R.get_data(scaling=False)
            A_scaled = R.get_data(scaling=True)

            sigma = (R.get_resolution()[0] / native_resolution[0]) ** 2

            # Compare extrema
            expected_scaled_max = sigma * numpy.nanmax(A_native)
            msg = ('Resampled raster was not rescaled correctly: '
                   'max(A_scaled) was %f but expected %f'
                   % (numpy.nanmax(A_scaled), expected_scaled_max))
            assert numpy.allclose(expected_scaled_max,
                                  numpy.nanmax(A_scaled),
                                  rtol=1.0e-8, atol=1.0e-8), msg

            expected_scaled_min = sigma * numpy.nanmin(A_native)
            msg = ('Resampled raster was not rescaled correctly: '
                   'min(A_scaled) was %f but expected %f'
                   % (numpy.nanmin(A_scaled), expected_scaled_min))
            assert numpy.allclose(expected_scaled_min,
                                  numpy.nanmin(A_scaled),
                                  rtol=1.0e-8, atol=1.0e-12), msg

            # Compare elementwise
            msg = 'Resampled raster was not rescaled correctly'
            assert nanallclose(A_native * sigma, A_scaled,
                               rtol=1.0e-8, atol=1.0e-8), msg

            # Check that it also works with manual scaling
            A_manual = R.get_data(scaling=sigma)
            msg = 'Resampled raster was not rescaled correctly'
            assert nanallclose(A_manual, A_scaled,
                               rtol=1.0e-8, atol=1.0e-8), msg

            # Check that an exception is raised for bad arguments
            try:
                R.get_data(scaling='bad')
            except:
                pass
            else:
                msg = 'String argument should have raised exception'
                raise Exception(msg)

            try:
                R.get_data(scaling='(1, 3)')
            except:
                pass
            else:
                msg = 'Tuple argument should have raised exception'
                raise Exception(msg)

            # Check None option without existence of density keyword
            A_none = R.get_data(scaling=None)
            msg = 'Data should not have changed'
            assert nanallclose(A_native, A_none,
                               rtol=1.0e-12, atol=1.0e-12), msg

            # Try with None and density keyword
            R.keywords['density'] = 'true'
            A_none = R.get_data(scaling=None)
            msg = 'Resampled raster was not rescaled correctly'
            assert nanallclose(A_scaled, A_none,
                               rtol=1.0e-12, atol=1.0e-12), msg

            R.keywords['density'] = 'Yes'
            A_none = R.get_data(scaling=None)
            msg = 'Resampled raster was not rescaled correctly'
            assert nanallclose(A_scaled, A_none,
                               rtol=1.0e-12, atol=1.0e-12), msg

            R.keywords['density'] = 'False'
            A_none = R.get_data(scaling=None)
            msg = 'Data should not have changed'
            assert nanallclose(A_native, A_none,
                               rtol=1.0e-12, atol=1.0e-12), msg

            R.keywords['density'] = 'no'
            A_none = R.get_data(scaling=None)
            msg = 'Data should not have changed'
            assert nanallclose(A_native, A_none,
                               rtol=1.0e-12, atol=1.0e-12), msg
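# Worked example of the density rescaling factor sigma used above
# (illustrative numbers only): resampling a per-cell density raster from
# cellsize 0.01 to 0.02 degrees means each new cell covers
# (0.02 / 0.01)**2 = 4 native cells, so values must be multiplied by 4 for
# totals to be conserved.
native_res = 0.01
new_res = 0.02
sigma = (new_res / native_res) ** 2
assert sigma == 4.0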
def test_specified_raster_resolution(self):
    """Raster layers can be downloaded with specific resolution

    This is another test for ticket #103

    Native test data:

    maumere....asc
    ncols 931
    nrows 463
    cellsize 0.00018

    Population_Jakarta
    ncols 638
    nrows 649
    cellsize 0.00045228819716044

    Population_2010
    ncols 5525
    nrows 2050
    cellsize 0.0083333333333333

    Here we download it at a range of fixed resolutions that
    are both coarser and finer, and check that the dimensions
    of the downloaded matrix are as expected.

    We also check that the extrema of the subsampled matrix are sane
    """

    for test_filename in ['maumere_aos_depth_20m_land_wgs84.asc',
                          'Population_Jakarta_geographic.asc',
                          'Population_2010.asc']:

        hazard_filename = ('%s/%s' % (TESTDATA, test_filename))

        # Get reference values
        H = read_layer(hazard_filename)
        depth_min_ref, depth_max_ref = H.get_extrema()
        native_resolution = H.get_resolution()

        # Upload to internal geonode
        hazard_layer = save_to_geonode(hazard_filename, user=self.user)
        hazard_name = '%s:%s' % (hazard_layer.workspace,
                                 hazard_layer.name)

        # Test for a range of resolutions
        for res in [0.02, 0.01, 0.005, 0.002, 0.001, 0.0005,  # Coarser
                    0.0002, 0.0001, 0.00006, 0.00003]:        # Finer

            # To save time don't do finest resolution for the
            # two population sets
            if test_filename.startswith('Population') and res < 0.00006:
                break

            # Set bounding box
            bbox = get_bounding_box_string(hazard_filename)
            compare_extrema = True
            if test_filename == 'Population_2010.asc':
                # Make bbox small for finer resolutions to
                # save time and to test that as well.
                # However, extrema obviously won't match those
                # of the full dataset. Once we can clip
                # datasets, we can remove this restriction.
                if res < 0.005:
                    bbox = '106.685974,-6.373421,106.974534,-6.079886'
                    compare_extrema = False
            bb = bboxstring2list(bbox)

            # Download data at specified resolution
            H = download(INTERNAL_SERVER_URL, hazard_name,
                         bbox, resolution=res)
            A = H.get_data()

            # Verify that data has the requested bbox and resolution
            actual_bbox = H.get_bounding_box()
            msg = ('Bounding box for %s was not as requested. I got %s '
                   'but expected %s' % (hazard_name, actual_bbox, bb))
            assert numpy.allclose(actual_bbox, bb, rtol=1.0e-6), msg

            # FIXME (Ole): How do we sensibly resolve the issue with
            #              resx, resy vs one resolution (issue #173)
            actual_resolution = H.get_resolution()[0]

            # FIXME (Ole): Resolution is often far from the requested
            #              - see issue #102
            #              Here we have to accept up to 5%
            tolerance102 = 5.0e-2
            msg = ('Resolution of %s was not as requested. I got %s but '
                   'expected %s' % (hazard_name, actual_resolution, res))
            assert numpy.allclose(actual_resolution, res,
                                  rtol=tolerance102), msg

            # Determine expected shape from bbox (W, S, E, N)
            ref_rows = int(round((bb[3] - bb[1]) / res))
            ref_cols = int(round((bb[2] - bb[0]) / res))

            # Compare shapes (generally, this may differ by 1)
            msg = ('Shape of downloaded raster was [%i, %i]. '
                   'Expected [%i, %i].' % (A.shape[0], A.shape[1],
                                           ref_rows, ref_cols))
            assert (ref_rows == A.shape[0] and
                    ref_cols == A.shape[1]), msg

            # Assess that the range of the interpolated data is sane
            if not compare_extrema:
                continue

            # For these test sets we get exact match of the minimum
            msg = ('Minimum of %s resampled at resolution %f '
                   'was %f. Expected %f.' % (hazard_layer.name,
                                             res,
                                             numpy.nanmin(A),
                                             depth_min_ref))
            assert numpy.allclose(depth_min_ref, numpy.nanmin(A),
                                  rtol=0.0, atol=0.0), msg

            # At the maximum it depends on the subsampling
            msg = ('Maximum of %s resampled at resolution %f '
                   'was %f. Expected %f.' % (hazard_layer.name,
                                             res,
                                             numpy.nanmax(A),
                                             depth_max_ref))
            if res < native_resolution[0]:
                # When subsampling to finer resolutions we expect a
                # close match
                assert numpy.allclose(depth_max_ref, numpy.nanmax(A),
                                      rtol=1.0e-10, atol=1.0e-8), msg
            elif res < native_resolution[0] * 10:
                # When upsampling to coarser resolutions we expect
                # ballpark match (~20%)
                assert numpy.allclose(depth_max_ref, numpy.nanmax(A),
                                      rtol=0.17, atol=0.0), msg
            else:
                # Upsampling to very coarse resolutions, just want sanity
                assert 0 < numpy.nanmax(A) <= depth_max_ref
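# Quick sanity check of the expected-shape arithmetic above (illustrative
# numbers only): a 1 x 1 degree bounding box downloaded at 0.01 degree
# resolution should come back as a 100 x 100 grid, give or take the one
# row/column noted in the comment above.
west, south, east, north = 106.0, -7.0, 107.0, -6.0
res = 0.01
assert int(round((north - south) / res)) == 100
assert int(round((east - west) / res)) == 100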
def test_keywords_download(self):
    """Keywords are downloaded from GeoServer along with layer data
    """

    # Upload test data
    filenames = ['Lembang_Earthquake_Scenario.asc',
                 'Padang_WGS84.shp',
                 'maumere_aos_depth_20m_land_wgs84.asc']
    layers = []
    paths = []
    for filename in filenames:
        basename, ext = os.path.splitext(filename)

        path = os.path.join(TESTDATA, filename)

        # Upload to GeoNode
        layer = save_to_geonode(path, user=self.user, overwrite=True)

        # Record layer and file
        layers.append(layer)
        paths.append(path)

    # Check integrity
    for i, layer in enumerate(layers):

        # Get reference keyword dictionary from file
        L = read_layer(paths[i])
        ref_keywords = L.get_keywords()

        # Get keywords metadata from GeoServer
        layer_name = '%s:%s' % (layer.workspace, layer.name)
        metadata = get_metadata(INTERNAL_SERVER_URL,
                                layer_name)
        assert 'keywords' in metadata
        geo_keywords = metadata['keywords']
        msg = ('Uploaded keywords were not as expected: I got %s '
               'but expected %s' % (geo_keywords, ref_keywords))
        for kw in ref_keywords:
            # Check that all keywords were uploaded
            # It is OK for new automatic keywords to have appeared
            # (e.g. resolution) - see issue #171
            assert kw in geo_keywords, msg
            assert ref_keywords[kw] == geo_keywords[kw], msg

        # Download data
        bbox = get_bounding_box_string(paths[i])
        H = download(INTERNAL_SERVER_URL,
                     layer_name,
                     bbox)

        dwn_keywords = H.get_keywords()
        msg = ('Downloaded keywords were not as expected: I got %s '
               'but expected %s' % (dwn_keywords, geo_keywords))
        assert geo_keywords == dwn_keywords, msg

        # Check that the layer and its .keywords file are there
        msg = 'Downloaded layer %s was not found' % H.filename
        assert os.path.isfile(H.filename), msg

        kw_filename = os.path.splitext(H.filename)[0] + '.keywords'
        msg = 'Downloaded keywords file %s was not found' % kw_filename
        assert os.path.isfile(kw_filename), msg

        # Check that keywords are OK when reading downloaded file
        L = read_layer(H.filename)
        read_keywords = L.get_keywords()
        msg = ('Keywords in downloaded file %s were not as expected: '
               'I got %s but expected %s'
               % (kw_filename, read_keywords, geo_keywords))
        assert read_keywords == geo_keywords, msg
def test_earthquake_exposure_plugin(self):
    """Population exposure to individual MMI levels can be computed
    """

    # Upload exposure data for this test
    # FIXME (Ole): While this dataset is ok for testing,
    # note that it has been resampled without scaling
    # so numbers are about 25 times too large.
    # Consider replacing the test population dataset for good measure,
    # just in case anyone accidentally started using this dataset
    # for real.
    name = 'Population_2010'
    exposure_filename = '%s/%s.asc' % (TESTDATA, name)
    exposure_layer = save_to_geonode(exposure_filename,
                                     user=self.user, overwrite=True)
    exposure_name = '%s:%s' % (exposure_layer.workspace,
                               exposure_layer.name)

    # Check metadata
    assert_bounding_box_matches(exposure_layer, exposure_filename)
    exp_bbox_string = get_bounding_box_string(exposure_filename)
    check_layer(exposure_layer, full=True)

    # Upload hazard data
    filename = 'Lembang_Earthquake_Scenario.asc'
    hazard_filename = '%s/%s' % (TESTDATA, filename)
    hazard_layer = save_to_geonode(hazard_filename,
                                   user=self.user, overwrite=True)
    hazard_name = '%s:%s' % (hazard_layer.workspace,
                             hazard_layer.name)

    # Check metadata
    assert_bounding_box_matches(hazard_layer, hazard_filename)
    haz_bbox_string = get_bounding_box_string(hazard_filename)
    check_layer(hazard_layer, full=True)

    # Run calculation
    c = Client()
    rv = c.post('/impact/api/calculate/', data=dict(
            hazard_server=INTERNAL_SERVER_URL,
            hazard=hazard_name,
            exposure_server=INTERNAL_SERVER_URL,
            exposure=exposure_name,
            bbox=haz_bbox_string,
            impact_function='EarthquakePopulationExposureFunction',
            keywords='test,population,exposure,usgs'))

    self.assertEqual(rv.status_code, 200)
    self.assertEqual(rv['Content-Type'], 'application/json')
    data = json.loads(rv.content)
    if 'errors' in data:
        errors = data['errors']
        if errors is not None:
            msg = ('The server returned the error message: %s'
                   % str(errors))
            raise Exception(msg)

    assert 'success' in data
    assert 'hazard_layer' in data
    assert 'exposure_layer' in data
    assert 'run_duration' in data
    assert 'run_date' in data
    assert 'layer' in data
    assert data['success']

    # Download result and check
    layer_name = data['layer'].split('/')[-1]

    result_layer = download(INTERNAL_SERVER_URL,
                            layer_name,
                            get_bounding_box_string(hazard_filename))
    assert os.path.exists(result_layer.filename)

    # Check calculated values
    keywords = result_layer.get_keywords()
    assert 'mmi-classes' in keywords
    assert 'affected-population' in keywords

    mmi_classes = [int(x) for x in keywords['mmi-classes'].split('_')]
    count = [float(x) for x in keywords['affected-population'].split('_')]

    # Brute force count for each population level
    population = download(INTERNAL_SERVER_URL,
                          exposure_name,
                          get_bounding_box_string(hazard_filename))
    intensity = download(INTERNAL_SERVER_URL,
                         hazard_name,
                         get_bounding_box_string(hazard_filename))

    # Extract data
    H = intensity.get_data(nan=0)
    P = population.get_data(nan=0)

    brutecount = {}
    for mmi in mmi_classes:
        brutecount[mmi] = 0

    for i in range(P.shape[0]):
        for j in range(P.shape[1]):
            mmi = H[i, j]
            if not numpy.isnan(mmi):
                mmi_class = int(round(mmi))

                pop = P[i, j]
                if not numpy.isnan(pop):
                    brutecount[mmi_class] += pop

    for i, mmi in enumerate(mmi_classes):
        assert numpy.allclose(count[i], brutecount[mmi], rtol=1.0e-6)
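# The nested brute force loops above could also be expressed with vectorised
# numpy operations. A self-contained sketch with toy arrays (same semantics:
# sum population over cells whose rounded intensity equals each MMI class):
import numpy
H = numpy.array([[5.2, 5.8], [6.4, 7.1]])    # intensity (toy values)
P = numpy.array([[10., 20.], [30., 40.]])    # population (toy values)
mmi_classes = [5, 6, 7]
rounded = numpy.round(H).astype(int)
counts = dict((mmi, P[rounded == mmi].sum()) for mmi in mmi_classes)
assert counts == {5: 10.0, 6: 50.0, 7: 40.0}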
def test_linked_datasets(self):
    """Linked datasets can be pulled in e.g. to include gender breakdown
    """

    # Upload exposure data for this test. This will automatically
    # pull in female_pct_yogya.asc through its "associates" keyword
    name = 'population_yogya'
    exposure_filename = '%s/%s.asc' % (TESTDATA, name)
    exposure_layer = save_to_geonode(exposure_filename,
                                     user=self.user, overwrite=True)
    exposure_name = '%s:%s' % (exposure_layer.workspace,
                               exposure_layer.name)

    # Check metadata
    assert_bounding_box_matches(exposure_layer, exposure_filename)
    exp_bbox_string = get_bounding_box_string(exposure_filename)
    check_layer(exposure_layer, full=True)

    # Upload hazard data
    filename = 'eq_yogya_2006.asc'
    hazard_filename = '%s/%s' % (TESTDATA, filename)
    hazard_layer = save_to_geonode(hazard_filename,
                                   user=self.user, overwrite=True)
    hazard_name = '%s:%s' % (hazard_layer.workspace,
                             hazard_layer.name)

    # Check metadata
    assert_bounding_box_matches(hazard_layer, hazard_filename)
    haz_bbox_string = get_bounding_box_string(hazard_filename)
    check_layer(hazard_layer, full=True)

    # Run calculation
    c = Client()
    rv = c.post('/impact/api/calculate/', data=dict(
            hazard_server=INTERNAL_SERVER_URL,
            hazard=hazard_name,
            exposure_server=INTERNAL_SERVER_URL,
            exposure=exposure_name,
            bbox=haz_bbox_string,
            impact_function='EarthquakeFatalityFunction',
            keywords='test,fatalities,population,usgs'))

    self.assertEqual(rv.status_code, 200)
    self.assertEqual(rv['Content-Type'], 'application/json')
    data = json.loads(rv.content)
    if 'errors' in data:
        errors = data['errors']
        if errors is not None:
            msg = ('The server returned the error message: %s'
                   % str(errors))
            raise Exception(msg)

    assert 'success' in data
    assert 'hazard_layer' in data
    assert 'exposure_layer' in data
    assert 'run_duration' in data
    assert 'run_date' in data
    assert 'layer' in data
    assert data['success']

    # Download result and check
    layer_name = data['layer'].split('/')[-1]

    result_layer = download(INTERNAL_SERVER_URL,
                            layer_name,
                            get_bounding_box_string(hazard_filename))
    assert os.path.exists(result_layer.filename)

    # Check calculated values
    keywords = result_layer.get_keywords()

    assert 'caption' in keywords
def Xtest_interpolation_example(self):
    """Interpolation is done correctly with data going through geonode

    This data (Maumere scenario) showed some very wrong results when
    first attempted in August 2011 - hence this test
    """

    # Define file names for hazard level, exposure and expected fatalities
    hazard_filename = ('%s/maumere_aos_depth_20m_land_wgs84.asc'
                       % TESTDATA)
    exposure_filename = ('%s/maumere_pop_prj.shp' % TESTDATA)

    # Upload to internal geonode
    hazard_layer = save_to_geonode(hazard_filename, user=self.user)
    hazard_name = '%s:%s' % (hazard_layer.workspace, hazard_layer.name)

    exposure_layer = save_to_geonode(exposure_filename, user=self.user)
    exposure_name = '%s:%s' % (exposure_layer.workspace,
                               exposure_layer.name)

    # Download data again
    bbox = get_bounding_box_string(hazard_filename)  # The biggest
    H = download(INTERNAL_SERVER_URL, hazard_name, bbox)
    E = download(INTERNAL_SERVER_URL, exposure_name, bbox)

    A = H.get_data()
    depth_min, depth_max = H.get_extrema()

    # Compare extrema to values read off QGIS for this layer
    print 'E', depth_min, depth_max
    assert numpy.allclose([depth_min, depth_max], [0.0, 16.68],
                          rtol=1.0e-6, atol=1.0e-10)

    coordinates = E.get_geometry()
    attributes = E.get_data()

    # Interpolate
    I = H.interpolate(E, name='depth')
    Icoordinates = I.get_geometry()
    Iattributes = I.get_data()
    assert numpy.allclose(Icoordinates, coordinates)

    N = len(Icoordinates)
    assert N == 891

    # Verify interpolated values with test result
    for i in range(N):
        interpolated_depth = Iattributes[i]['depth']
        pointid = attributes[i]['POINTID']

        if pointid == 263:
            # Check that location is correct
            assert numpy.allclose(coordinates[i],
                                  [122.20367299, -8.61300358])

            # This is known to be outside inundation area so depth
            # should be near zero
            assert numpy.allclose(interpolated_depth, 0.0,
                                  rtol=1.0e-12, atol=1.0e-12)

        if pointid == 148:
            # Check that location is correct
            assert numpy.allclose(coordinates[i],
                                  [122.2045912, -8.608483265])

            # This is in an inundated area with surrounding depths of
            # 4.531, 3.911
            # 2.675, 2.583
            assert interpolated_depth < 4.531
            assert interpolated_depth > 2.583
            assert numpy.allclose(interpolated_depth, 3.553,
                                  rtol=1.0e-5, atol=1.0e-5)

        # Check that interpolated points are within range
        msg = ('Interpolated depth %f at point %i was outside extrema: '
               '[%f, %f]. ' % (interpolated_depth, i,
                               depth_min, depth_max))

        if not numpy.isnan(interpolated_depth):
            tol = 1.0e-6
def test_the_earthquake_fatality_estimation_allen(self):
    """Fatality computation computed correctly with GeoServer Data
    """

    # Simulate bounding box from application
    viewport_bbox_string = '104.3,-8.2,110.04,-5.17'

    # Upload exposure data for this test
    name = 'Population_2010'
    exposure_filename = '%s/%s.asc' % (TESTDATA, name)
    exposure_layer = save_to_geonode(exposure_filename,
                                     user=self.user, overwrite=True)

    workspace = exposure_layer.workspace
    msg = 'Expected workspace to be "geonode". Got %s' % workspace
    assert workspace == 'geonode', msg

    layer_name = exposure_layer.name
    msg = 'Expected layer name to be "%s". Got %s' % (name, layer_name)
    assert layer_name == name.lower(), msg

    exposure_name = '%s:%s' % (workspace, layer_name)

    # Check metadata
    assert_bounding_box_matches(exposure_layer, exposure_filename)
    exp_bbox_string = get_bounding_box_string(exposure_filename)
    check_layer(exposure_layer, full=True)

    # Now we know that the exposure layer is good, let's upload some
    # hazard layers and do the calculations
    filename = 'Lembang_Earthquake_Scenario.asc'

    # Save
    hazard_filename = '%s/%s' % (TESTDATA, filename)
    hazard_layer = save_to_geonode(hazard_filename,
                                   user=self.user, overwrite=True)
    hazard_name = '%s:%s' % (hazard_layer.workspace,
                             hazard_layer.name)

    # Check metadata
    assert_bounding_box_matches(hazard_layer, hazard_filename)
    haz_bbox_string = get_bounding_box_string(hazard_filename)
    check_layer(hazard_layer, full=True)

    # Run calculation
    c = Client()
    rv = c.post('/impact/api/calculate/', data=dict(
            hazard_server=INTERNAL_SERVER_URL,
            hazard=hazard_name,
            exposure_server=INTERNAL_SERVER_URL,
            exposure=exposure_name,
            #bbox=viewport_bbox_string,
            bbox=exp_bbox_string,  # This one reproduced the
                                   # crash for lembang
            impact_function='EarthquakeFatalityFunction',
            keywords='test,shakemap,usgs'))

    self.assertEqual(rv.status_code, 200)
    self.assertEqual(rv['Content-Type'], 'application/json')
    data = json.loads(rv.content)
    if 'errors' in data:
        errors = data['errors']
        if errors is not None:
            msg = ('The server returned the error message: %s'
                   % str(errors))
            raise Exception(msg)

    assert 'success' in data
    assert 'hazard_layer' in data
    assert 'exposure_layer' in data
    assert 'run_duration' in data
    assert 'run_date' in data
    assert 'layer' in data
    assert data['success']

    # Download result and check
    layer_name = data['layer'].split('/')[-1]

    result_layer = download(INTERNAL_SERVER_URL,
                            layer_name,
                            get_bounding_box_string(hazard_filename))
    assert os.path.exists(result_layer.filename)
def test_jakarta_flood_study(self):
    """HKV Jakarta flood study calculated correctly using the API
    """

    # FIXME (Ole): Redo with population as shapefile later

    # Expected values from HKV
    expected_values = [2485442, 1537920]

    # Name files for hazard level, exposure and expected fatalities
    population = 'Population_Jakarta_geographic'
    plugin_name = 'FloodImpactFunction'

    # Upload exposure data for this test
    exposure_filename = '%s/%s.asc' % (TESTDATA, population)
    exposure_layer = save_to_geonode(exposure_filename,
                                     user=self.user, overwrite=True)

    workspace = exposure_layer.workspace
    msg = 'Expected workspace to be "geonode". Got %s' % workspace
    assert workspace == 'geonode', msg

    layer_name = exposure_layer.name
    msg = 'Expected layer name to be "%s". Got %s' % (population,
                                                      layer_name)
    assert layer_name.lower() == population.lower(), msg

    exposure_name = '%s:%s' % (workspace, layer_name)

    # Check metadata
    assert_bounding_box_matches(exposure_layer, exposure_filename)
    exp_bbox_string = get_bounding_box_string(exposure_filename)
    check_layer(exposure_layer, full=True)

    # Now we know that the exposure layer is good, let's upload some
    # hazard layers and do the calculations
    i = 0
    for filename in ['Flood_Current_Depth_Jakarta_geographic.asc',
                     'Flood_Design_Depth_Jakarta_geographic.asc']:
        hazard_filename = os.path.join(TESTDATA, filename)
        exposure_filename = os.path.join(TESTDATA, population)

        # Save
        hazard_filename = '%s/%s' % (TESTDATA, filename)
        hazard_layer = save_to_geonode(hazard_filename,
                                       user=self.user, overwrite=True)
        hazard_name = '%s:%s' % (hazard_layer.workspace,
                                 hazard_layer.name)

        # Check metadata
        assert_bounding_box_matches(hazard_layer, hazard_filename)
        haz_bbox_string = get_bounding_box_string(hazard_filename)
        check_layer(hazard_layer, full=True)

        # Run calculation
        c = Client()
        rv = c.post('/impact/api/calculate/', data=dict(
                hazard_server=INTERNAL_SERVER_URL,
                hazard=hazard_name,
                exposure_server=INTERNAL_SERVER_URL,
                exposure=exposure_name,
                bbox=exp_bbox_string,
                impact_function=plugin_name,
                keywords='test,flood,HKV'))

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)
        if 'errors' in data:
            errors = data['errors']
            if errors is not None:
                raise Exception(errors)

        assert 'hazard_layer' in data
        assert 'exposure_layer' in data
        assert 'run_duration' in data
        assert 'run_date' in data
        assert 'layer' in data

        # Do calculation manually and check result
        hazard_raster = read_layer(hazard_filename)
        H = hazard_raster.get_data(nan=0)

        exposure_raster = read_layer(exposure_filename + '.asc')
        P = exposure_raster.get_data(nan=0)

        # Calculate impact manually
        pixel_area = 2500
        I = numpy.where(H > 0.1, P, 0) / 100000.0 * pixel_area

        # Verify correctness against results from HKV
        res = sum(I.flat)
        ref = expected_values[i]
        #print filename, 'Result=%f' % res, ' Expected=%f' % ref
        #print 'Pct relative error=%f' % (abs(res - ref) * 100. / ref)

        msg = 'Got result %f but expected %f' % (res, ref)
        assert numpy.allclose(res, ref, rtol=1.0e-2), msg

        # Verify correctness of result
        # Download result and check
        layer_name = data['layer'].split('/')[-1]

        result_layer = download(INTERNAL_SERVER_URL,
                                layer_name,
                                get_bounding_box_string(hazard_filename))
        assert os.path.exists(result_layer.filename)

        calculated_raster = read_layer(result_layer.filename)
        C = calculated_raster.get_data(nan=0)

        # FIXME (Ole): Bring this back
        # Check caption
        #caption = calculated_raster.get_caption()
        #print
        #print caption
        #expct = 'people'
        #msg = ('Caption %s did not contain expected '
        #       'keyword %s' % (caption, expct))
        #assert expct in caption, msg

        # Compare shape and extrema
        msg = ('Shape of calculated raster differs from reference raster: '
               'C=%s, I=%s' % (C.shape, I.shape))
        assert numpy.allclose(C.shape, I.shape,
                              rtol=1e-12, atol=1e-12), msg

        msg = ('Minimum of calculated raster differs from reference '
               'raster: '
               'C=%s, I=%s' % (numpy.nanmin(C), numpy.nanmin(I)))
        assert numpy.allclose(numpy.nanmin(C), numpy.nanmin(I),
                              rtol=1e-6, atol=1e-12), msg

        msg = ('Maximum of calculated raster differs from reference '
               'raster: '
               'C=%s, I=%s' % (numpy.nanmax(C), numpy.nanmax(I)))
        assert numpy.allclose(numpy.nanmax(C), numpy.nanmax(I),
                              rtol=1e-6, atol=1e-12), msg

        # Compare every single value numerically (a bit loose -
        # probably due to single precision conversions when
        # data flows through geonode)
        #
        # FIXME: Not working - but since this test is about
        # issue #162 we'll leave it for now. TODO with NAN
        # Manually verified that the two expected values are correct,
        # though.
        #msg = 'Array values of written raster array were not as expected'
        #print C
        #print I
        #print numpy.amax(numpy.abs(C - I))
        #assert numpy.allclose(C, I, rtol=1e-2, atol=1e-5), msg

        # Check that extrema are in range
        xmin, xmax = calculated_raster.get_extrema()
        assert numpy.alltrue(C[-numpy.isnan(C)] >= xmin), msg
        assert numpy.alltrue(C[-numpy.isnan(C)] <= xmax)
        assert numpy.alltrue(C[-numpy.isnan(C)] >= 0)

        i += 1
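# Minimal standalone illustration of the numpy.where thresholding used in
# the manual flood calculation above (toy arrays, not test data): population
# is kept only where flood depth exceeds the 0.1 m threshold.
import numpy
H = numpy.array([[0.0, 0.5], [2.0, 0.05]])     # flood depth in metres
P = numpy.array([[100., 100.], [100., 100.]])  # population per cell
exposed = numpy.where(H > 0.1, P, 0)
assert exposed.sum() == 200.0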
def calculate(request, save_output=save_to_geonode):
    start = datetime.datetime.now()

    if request.method == 'GET':
        # FIXME: Add a basic form here to be able to generate the POST
        # request.
        return HttpResponse('This should be accessed by robots, not humans. '
                            'In other words using HTTP POST instead of GET.')
    elif request.method == 'POST':
        data = request.POST
        impact_function_name = data['impact_function']
        hazard_server = data['hazard_server']
        hazard_layer = data['hazard']
        exposure_server = data['exposure_server']
        exposure_layer = data['exposure']
        bbox = data['bbox']
        keywords = data['keywords']

    if request.user.is_anonymous():
        theuser = get_valid_user()
    else:
        theuser = request.user

    # Create entry in database
    calculation = Calculation(user=theuser,
                              run_date=start,
                              hazard_server=hazard_server,
                              hazard_layer=hazard_layer,
                              exposure_server=exposure_server,
                              exposure_layer=exposure_layer,
                              impact_function=impact_function_name,
                              success=False)

    try:
        # Input checks
        msg = 'This cannot happen :-)'
        assert isinstance(bbox, basestring), msg

        check_bbox_string(bbox)

        # Find the intersection of bounding boxes for viewport,
        # hazard and exposure.
        vpt_bbox = bboxstring2list(bbox)
        haz_bbox = get_metadata(hazard_server,
                                hazard_layer)['bounding_box']
        exp_bbox = get_metadata(exposure_server,
                                exposure_layer)['bounding_box']

        # Impose minimum bounding box size (as per issue #101).
        # FIXME (Ole): This will need to be revisited in conjunction with
        # raster resolutions at some point.
        min_res = 0.00833334
        eps = 1.0e-1
        vpt_bbox = minimal_bounding_box(vpt_bbox, min_res, eps=eps)
        haz_bbox = minimal_bounding_box(haz_bbox, min_res, eps=eps)
        exp_bbox = minimal_bounding_box(exp_bbox, min_res, eps=eps)

        # New bounding box for data common to hazard, exposure and viewport
        # Download only data within this intersection
        intersection = bbox_intersection(vpt_bbox, haz_bbox, exp_bbox)
        if intersection is None:
            # Bounding boxes did not overlap
            msg = ('Bounding boxes of hazard data, exposure data and '
                   'viewport did not overlap, so no computation was '
                   'done. Please try again.')
            logger.info(msg)
            raise Exception(msg)

        bbox = bboxlist2string(intersection)

        plugin_list = get_plugins(impact_function_name)
        _, impact_function = plugin_list[0].items()[0]
        impact_function_source = inspect.getsource(impact_function)

        calculation.impact_function_source = impact_function_source
        calculation.bbox = bbox
        calculation.save()

        msg = 'Performing requested calculation'
        logger.info(msg)

        # Download selected layer objects
        msg = ('- Downloading hazard layer %s from %s'
               % (hazard_layer, hazard_server))
        logger.info(msg)
        H = download(hazard_server, hazard_layer, bbox)

        msg = ('- Downloading exposure layer %s from %s'
               % (exposure_layer, exposure_server))
        logger.info(msg)
        E = download(exposure_server, exposure_layer, bbox)

        # Calculate result using specified impact function
        msg = ('- Calculating impact using %s' % impact_function)
        logger.info(msg)
        impact_filename = calculate_impact(layers=[H, E],
                                           impact_fcn=impact_function)

        # Upload result to internal GeoServer
        msg = ('- Uploading impact layer %s' % impact_filename)
        logger.info(msg)
        result = save_output(impact_filename,
                             title='output_%s' % start.isoformat(),
                             user=theuser)
    except Exception, e:
        # FIXME: Reimplement error saving for calculation
        logger.error(e)
        errors = e.__str__()
        trace = exception_format(e)
        calculation.errors = errors
        calculation.stacktrace = trace
        calculation.save()
        jsondata = json.dumps({'errors': errors, 'stacktrace': trace})
        return HttpResponse(jsondata, mimetype='application/json')
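# For context, a minimal sketch of what a bounding box intersection such as
# bbox_intersection above might do, with boxes given as [W, S, E, N] lists.
# This is an assumed implementation for illustration; the real function in
# this codebase may differ:
def bbox_intersection_sketch(*boxes):
    """Return the common [west, south, east, north] box, or None if the
    boxes do not overlap.
    """
    west = max(b[0] for b in boxes)
    south = max(b[1] for b in boxes)
    east = min(b[2] for b in boxes)
    north = min(b[3] for b in boxes)
    if west >= east or south >= north:
        return None
    return [west, south, east, north]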
def calculate(request, save_output=save_to_geonode):
    start = datetime.datetime.now()

    if request.method == 'GET':
        # FIXME: Add a basic form here to be able to generate the POST
        # request.
        return HttpResponse('This should be accessed by robots, not humans. '
                            'In other words using HTTP POST instead of GET.')
    elif request.method == 'POST':
        data = request.POST
        impact_function_name = data['impact_function']
        hazard_server = data['hazard_server']
        hazard_layer = data['hazard']
        exposure_server = data['exposure_server']
        exposure_layer = data['exposure']
        requested_bbox = data['bbox']
        keywords = data['keywords']

    if request.user.is_anonymous():
        theuser = get_valid_user()
    else:
        theuser = request.user

    # Create entry in database
    calculation = Calculation(user=theuser,
                              run_date=start,
                              hazard_server=hazard_server,
                              hazard_layer=hazard_layer,
                              exposure_server=exposure_server,
                              exposure_layer=exposure_layer,
                              impact_function=impact_function_name,
                              success=False)

    # Wrap main computation loop in try except to catch and present
    # messages and stack traces in the application
    try:
        # Get metadata
        haz_metadata = get_metadata(hazard_server, hazard_layer)
        exp_metadata = get_metadata(exposure_server, exposure_layer)

        # Determine common resolution in case of raster layers
        raster_resolution = get_common_resolution(haz_metadata,
                                                  exp_metadata)

        # Get reconciled bounding boxes
        haz_bbox, exp_bbox, imp_bbox = get_bounding_boxes(haz_metadata,
                                                          exp_metadata,
                                                          requested_bbox)

        # Record layers to download
        download_layers = [(hazard_server, hazard_layer, haz_bbox),
                           (exposure_server, exposure_layer, exp_bbox)]

        # Add linked layers if any FIXME: STILL TODO!

        # Get selected impact function
        impact_function = get_plugin(impact_function_name)
        impact_function_source = inspect.getsource(impact_function)

        # Record information in calculation object and save it
        calculation.impact_function_source = impact_function_source
        calculation.bbox = bboxlist2string(imp_bbox)
        calculation.save()

        # Start computation
        msg = 'Performing requested calculation'
        logger.info(msg)

        # Download selected layer objects
        layers = []
        for server, layer_name, bbox in download_layers:
            msg = ('- Downloading layer %s from %s'
                   % (layer_name, server))
            logger.info(msg)

            L = download(server, layer_name, bbox, raster_resolution)
            layers.append(L)

        # Calculate result using specified impact function
        msg = ('- Calculating impact using %s' % impact_function)
        logger.info(msg)

        impact_filename = calculate_impact(layers=layers,
                                           impact_fcn=impact_function)

        # Upload result to internal GeoServer
        msg = ('- Uploading impact layer %s' % impact_filename)
        logger.info(msg)
        result = save_output(impact_filename,
                             title='output_%s' % start.isoformat(),
                             user=theuser)
    except Exception, e:
        # FIXME: Reimplement error saving for calculation.
        # FIXME (Ole): Why should we reimplement?
        # This is dangerous. Try to raise an exception
        # e.g. in get_metadata_from_layer. Things will silently fail.
        # See issue #170
        logger.error(e)
        errors = e.__str__()
        trace = exception_format(e)
        calculation.errors = errors
        calculation.stacktrace = trace
        calculation.save()
        jsondata = json.dumps({'errors': errors, 'stacktrace': trace})
        return HttpResponse(jsondata, mimetype='application/json')