def test_geotransform_from_geonode(self):
    """Geotransforms of GeoNode layers can be correctly determined
    """

    for filename in ['lembang_mmi_hazmap.asc', 'test_grid.asc']:
        # Push the raster file into GeoNode
        path = os.path.join(TESTDATA, filename)
        uploaded = save_to_geonode(path, user=self.user)

        # The reference geotransform comes straight from the file on disk
        raster = read_layer(path)
        expected_geotransform = raster.get_geotransform()

        # Ask GeoNode for its metadata record of the same layer
        layer_name = uploaded.typename
        metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

        key = 'geotransform'
        msg = ('Could not find attribute "%s" in metadata. '
               'Values are: %s' % (key, metadata.keys()))
        assert key in metadata, msg

        # Compare element-wise within floating point tolerance
        reported_geotransform = metadata[key]
        msg = ('Geotransform obtained from GeoNode for layer %s '
               'was not correct. I got %s but expected %s'
               '' % (layer_name,
                     reported_geotransform,
                     expected_geotransform))
        assert numpy.allclose(expected_geotransform,
                              reported_geotransform), msg
def test_another_asc(self):
    """Real world ASCII file can be uploaded
    """
    thefile = os.path.join(TESTDATA, 'lembang_mmi_hazmap.asc')
    layer = save_to_geonode(thefile, user=self.user, overwrite=True)
    check_layer(layer, full=True)

    # Verify metadata record contains all mandatory fields
    layer_name = '%s:%s' % (layer.workspace, layer.name)
    metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)
    for field in ('id', 'title', 'layer_type', 'keywords',
                  'bounding_box', 'geotransform'):
        assert field in metadata
    assert len(metadata['bounding_box']) == 4

    # A little metadata characterisation test
    ref = {'layer_type': 'raster',
           'keywords': {'category': 'hazard',
                        'subcategory': 'earthquake'},
           'geotransform': (105.29857, 0.0112, 0.0,
                            -5.565233000000001, 0.0, -0.0112),
           'title': 'lembang_mmi_hazmap'}

    for key in ['layer_type', 'keywords', 'geotransform', 'title']:
        msg = ('Expected metadata for key %s to be %s. '
               'Instead got %s' % (key, ref[key], metadata[key]))
        if key == 'geotransform':
            # Floating point comparison for the six geotransform numbers
            assert numpy.allclose(metadata[key], ref[key]), msg
        else:
            assert metadata[key] == ref[key], msg
            if key == 'keywords':
                # Also compare keyword entries one by one
                uploaded_kwds = metadata[key]
                for kw in uploaded_kwds:
                    assert uploaded_kwds[kw] == ref[key][kw]
def test_geotransform_from_geonode(self):
    """Geotransforms of GeoNode layers can be correctly determined

    Uploads a set of rasters, then verifies that the geotransform
    reported by GeoNode's metadata matches the geotransform read
    directly from each file.
    """

    for filename in ['lembang_mmi_hazmap.asc',
                     'test_grid.asc',
                     'shakemap_padang_20090930.asc',
                     'Population_2010_clip.tif']:

        # Upload file to GeoNode
        # NOTE(review): changed TEST_DATA to TESTDATA for consistency
        # with the other tests in this file — confirm constant name.
        f = os.path.join(TESTDATA, filename)
        layer = save_to_geonode(f, user=self.user)
        name = '%s:%s' % (layer.workspace, layer.name)

        # Read raster file and obtain reference resolution
        R = read_layer(f)
        ref_geotransform = R.get_geotransform()

        # ARIEL: geotransform is a vector of six numbers:
        #
        #  (top left x, w-e pixel resolution, rotation,
        #   top left y, rotation, n-s pixel resolution).
        #
        # We should (at least) use elements 0, 1, 3, 5
        # to uniquely determine if rasters are aligned
        # - This depends on what you can get from geonode

        # Get geotransform from GeoNode
        layer_name = layer.name
        metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

        geotransform_name = 'geotransform'
        msg = ('Could not find attribute "%s" in metadata. Values are: %s'
               % (geotransform_name, metadata.keys()))
        assert geotransform_name in metadata, msg

        # BUG FIX: read the same key that the assertion above checked.
        # The original indexed metadata['geo_transform'], which would
        # raise KeyError even after the assertion passed.
        gn_geotransform = metadata[geotransform_name]
        msg = ('Geotransform obtained from GeoNode for layer %s '
               'was not correct. I got %s but expected %s'
               '' % (name, gn_geotransform, ref_geotransform))
        assert numpy.allclose(ref_geotransform, gn_geotransform), msg
def calculate(request, save_output=save_to_geonode):
    """Django view: run an impact calculation requested via HTTP POST.

    POST parameters (all required): impact_function, hazard_server,
    hazard, exposure_server, exposure, bbox, keywords.

    On failure, returns a JSON document with 'errors' and 'stacktrace'.
    NOTE(review): within this excerpt the success path ends at
    save_output() without an explicit return — confirm against the
    full file whether more of the view follows.
    """
    # Record start time so the output layer title is unique per run
    start = datetime.datetime.now()

    if request.method == 'GET':
        # FIXME: Add a basic form here to be able to generate the POST request.
        return HttpResponse('This should be accessed by robots, not humans.'
                            'In other words using HTTP POST instead of GET.')
    elif request.method == 'POST':
        data = request.POST
        impact_function_name = data['impact_function']
        hazard_server = data['hazard_server']
        hazard_layer = data['hazard']
        exposure_server = data['exposure_server']
        exposure_layer = data['exposure']
        bbox = data['bbox']
        keywords = data['keywords']

        # Anonymous requests are attributed to a fallback user
        if request.user.is_anonymous():
            theuser = get_valid_user()
        else:
            theuser = request.user

        # Create entry in database (marked unsuccessful until proven otherwise)
        calculation = Calculation(user=theuser,
                                  run_date=start,
                                  hazard_server=hazard_server,
                                  hazard_layer=hazard_layer,
                                  exposure_server=exposure_server,
                                  exposure_layer=exposure_layer,
                                  impact_function=impact_function_name,
                                  success=False)

        try:
            # Input checks
            msg = 'This cannot happen :-)'
            assert isinstance(bbox, basestring), msg

            check_bbox_string(bbox)

            # Find the intersection of bounding boxes for viewport,
            # hazard and exposure.
            vpt_bbox = bboxstring2list(bbox)
            haz_bbox = get_metadata(hazard_server,
                                    hazard_layer)['bounding_box']
            exp_bbox = get_metadata(exposure_server,
                                    exposure_layer)['bounding_box']

            # Impose minimum bounding box size (as per issue #101).
            # FIXME (Ole): This will need to be revisited in conjunction with
            # raster resolutions at some point.
            min_res = 0.00833334
            eps = 1.0e-1
            vpt_bbox = minimal_bounding_box(vpt_bbox, min_res, eps=eps)
            haz_bbox = minimal_bounding_box(haz_bbox, min_res, eps=eps)
            exp_bbox = minimal_bounding_box(exp_bbox, min_res, eps=eps)

            # New bounding box for data common to hazard, exposure and viewport
            # Download only data within this intersection
            intersection = bbox_intersection(vpt_bbox, haz_bbox, exp_bbox)
            if intersection is None:
                # Bounding boxes did not overlap
                msg = ('Bounding boxes of hazard data, exposure data and '
                       'viewport did not overlap, so no computation was '
                       'done. Please try again.')
                logger.info(msg)
                raise Exception(msg)

            # Reuse the 'bbox' name for the reconciled bounding box string
            bbox = bboxlist2string(intersection)

            # Look up the impact function implementation by name
            plugin_list = get_plugins(impact_function_name)
            _, impact_function = plugin_list[0].items()[0]
            impact_function_source = inspect.getsource(impact_function)

            # Persist the chosen function source and bbox on the record
            calculation.impact_function_source = impact_function_source
            calculation.bbox = bbox
            calculation.save()

            msg = 'Performing requested calculation'
            logger.info(msg)

            # Download selected layer objects
            msg = ('- Downloading hazard layer %s from %s'
                   % (hazard_layer, hazard_server))
            logger.info(msg)
            H = download(hazard_server, hazard_layer, bbox)

            msg = ('- Downloading exposure layer %s from %s'
                   % (exposure_layer, exposure_server))
            logger.info(msg)
            E = download(exposure_server, exposure_layer, bbox)

            # Calculate result using specified impact function
            msg = ('- Calculating impact using %s' % impact_function)
            logger.info(msg)
            impact_filename = calculate_impact(layers=[H, E],
                                               impact_fcn=impact_function)

            # Upload result to internal GeoServer
            msg = ('- Uploading impact layer %s' % impact_filename)
            logger.info(msg)
            result = save_output(impact_filename,
                                 title='output_%s' % start.isoformat(),
                                 user=theuser)
        except Exception, e:
            #FIXME: Reimplement error saving for calculation
            # Record failure details on the calculation and return them
            # to the client as JSON rather than raising a 500.
            logger.error(e)
            errors = e.__str__()
            trace = exception_format(e)
            calculation.errors = errors
            calculation.stacktrace = trace
            calculation.save()
            jsondata = json.dumps({'errors': errors,
                                   'stacktrace': trace})
            return HttpResponse(jsondata, mimetype='application/json')
def test_metadata(self):
    """Metadata is retrieved correctly for both raster and vector data

    Uploads a mix of .asc rasters and .shp vectors, then verifies the
    OWS metadata (id, title, type, keywords, bounding box) against both
    the Django layer objects and the files on disk.
    """

    # Upload test data
    filenames = ['Lembang_Earthquake_Scenario.asc',
                 'Earthquake_Ground_Shaking.asc',
                 'lembang_schools.shp',
                 'Padang_WGS84.shp']
    layers = []
    paths = []
    for filename in filenames:
        basename, ext = os.path.splitext(filename)

        path = os.path.join(TESTDATA, filename)
        layer = save_to_geonode(path, user=self.user, overwrite=True)

        # Record layer and file
        layers.append(layer)
        paths.append(path)

    # Check integrity
    for i, layer in enumerate(layers):

        # Derive the expected layer type from the file extension
        if filenames[i].endswith('.shp'):
            layer_type = 'vector'
        elif filenames[i].endswith('.asc'):
            layer_type = 'raster'
        else:
            msg = ('Unknown layer extension in %s. '
                   'Expected .shp or .asc' % filenames[i])
            raise Exception(msg)

        layer_name = '%s:%s' % (layer.workspace, layer.name)
        metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

        assert 'id' in metadata
        assert 'title' in metadata
        assert 'layer_type' in metadata
        assert 'keywords' in metadata
        assert 'bounding_box' in metadata
        assert len(metadata['bounding_box']) == 4

        # Check integrity between Django layer and file
        assert_bounding_box_matches(layer, paths[i])

        # Check integrity between file and OWS metadata
        ref_bbox = get_bounding_box(paths[i])
        msg = ('Bounding box from OWS did not match bounding box '
               'from file. They are\n'
               'From file %s: %s\n'
               'From OWS: %s' % (paths[i],
                                 ref_bbox,
                                 metadata['bounding_box']))
        assert numpy.allclose(metadata['bounding_box'], ref_bbox), msg

        assert layer.name == metadata['title']
        assert layer_name == metadata['id']
        assert layer_type == metadata['layer_type']

        # Check keywords expected for each layer type
        if layer_type == 'raster':
            category = 'hazard'
            subcategory = 'earthquake'
        elif layer_type == 'vector':
            category = 'exposure'
            subcategory = 'building'
        else:
            msg = 'Unknown layer type %s' % layer_type
            raise Exception(msg)

        keywords = metadata['keywords']

        msg = 'Did not find key "category" in keywords: %s' % keywords
        assert 'category' in keywords, msg

        msg = 'Did not find key "subcategory" in keywords: %s' % keywords
        assert 'subcategory' in keywords, msg

        msg = ('Category keyword %s did not match expected %s'
               % (keywords['category'], category))
        assert category == keywords['category'], msg

        # BUG FIX: this message previously interpolated 'category'
        # instead of 'subcategory', producing a misleading failure text.
        msg = ('Subcategory keyword %s did not match expected %s'
               % (keywords['subcategory'], subcategory))
        assert subcategory == keywords['subcategory'], msg
def test_metadata_twice(self):
    """Layer metadata can be correctly uploaded multiple times

    Reproduces ticket #99 by copying test data to a unique temporary
    name, uploading it three times and verifying the metadata each time.
    """

    # This test reproduces ticket #99 by creating new data,
    # uploading twice and verifying metadata

    # Base test data
    filenames = ['Lembang_Earthquake_Scenario.asc',
                 'lembang_schools.shp']

    for org_filename in filenames:
        org_basename, ext = os.path.splitext(os.path.join(TESTDATA,
                                                          org_filename))

        # Copy data to temporary unique name
        basename = unique_filename(dir='/tmp')

        # Best-effort copy of sidecar files (may be absent for some data)
        cmd = '/bin/cp %s.keywords %s.keywords' % (org_basename, basename)
        os.system(cmd)

        cmd = '/bin/cp %s.prj %s.prj' % (org_basename, basename)
        os.system(cmd)

        if ext == '.asc':
            layer_type = 'raster'
            filename = '%s.asc' % basename
            cmd = '/bin/cp %s.asc %s' % (org_basename, filename)
            os.system(cmd)
        elif ext == '.shp':
            layer_type = 'vector'
            filename = '%s.shp' % basename
            # Shapefiles are multi-file datasets; copy each component
            for e in ['shp', 'shx', 'sbx', 'sbn', 'dbf']:
                cmd = '/bin/cp %s.%s %s.%s' % (org_basename, e,
                                               basename, e)
                os.system(cmd)
        else:
            msg = ('Unknown layer extension in %s. '
                   'Expected .shp or .asc' % filename)
            raise Exception(msg)

        # Repeat multiple times
        for i in range(3):

            # Upload
            layer = save_to_geonode(filename, user=self.user,
                                    overwrite=True)

            # Get metadata
            layer_name = '%s:%s' % (layer.workspace, layer.name)
            metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

            # Verify
            assert 'id' in metadata
            assert 'title' in metadata
            assert 'layer_type' in metadata
            assert 'keywords' in metadata
            assert 'bounding_box' in metadata
            assert len(metadata['bounding_box']) == 4

            # Check integrity between Django layer and file
            assert_bounding_box_matches(layer, filename)

            # Check integrity between file and OWS metadata
            ref_bbox = get_bounding_box(filename)
            msg = ('Bounding box from OWS did not match bounding box '
                   'from file. They are\n'
                   'From file %s: %s\n'
                   'From OWS: %s' % (filename,
                                     ref_bbox,
                                     metadata['bounding_box']))
            assert numpy.allclose(metadata['bounding_box'], ref_bbox), msg

            assert layer.name == metadata['title']
            assert layer_name == metadata['id']
            assert layer_type == metadata['layer_type']

            # Check keywords expected for each layer type
            if layer_type == 'raster':
                category = 'hazard'
                subcategory = 'earthquake'
            elif layer_type == 'vector':
                category = 'exposure'
                subcategory = 'building'
            else:
                msg = 'Unknown layer type %s' % layer_type
                raise Exception(msg)

            keywords = metadata['keywords']

            msg = 'Did not find key "category" in keywords: %s' % keywords
            assert 'category' in keywords, msg

            msg = ('Did not find key "subcategory" in keywords: %s'
                   % keywords)
            assert 'subcategory' in keywords, msg

            msg = ('Category keyword %s did not match expected %s'
                   % (keywords['category'], category))
            assert category == keywords['category'], msg

            # BUG FIX: this message previously interpolated 'category'
            # instead of 'subcategory', producing misleading failure text.
            msg = ('Subcategory keyword %s did not match expected %s'
                   % (keywords['subcategory'], subcategory))
            assert subcategory == keywords['subcategory'], msg
def test_layer_upload(self):
    """Layers can be uploaded to local GeoNode
    """
    # Layers deliberately excluded from the upload.
    # FIXME(Ole): 'kecamatan_prj.shp' is not really 'BAD', just in a
    # different projection (TM3_Zone_48-2), so it doubles as another
    # test for issue #40.
    BAD_LAYERS = ['grid_without_projection.asc',
                  'kecamatan_prj.shp']

    datadir = TESTDATA
    expected_layers = []
    not_expected_layers = []
    for root, dirs, files in os.walk(datadir):
        for filename in files:
            basename, extension = os.path.splitext(filename)
            if extension.lower() not in LAYER_TYPES:
                continue
            # FIXME(Ole): GeoNode converts names to lower case
            name = unicode(basename.lower())
            if filename in BAD_LAYERS:
                not_expected_layers.append(name)
            else:
                expected_layers.append(name)

    # Upload the whole directory in one call, skipping the bad layers
    layers = save_to_geonode(datadir, user=self.user, overwrite=True,
                             ignore=BAD_LAYERS)

    # Every layer that came back must have been expected
    layer_names = [l.name for l in layers]
    for layer in layers:
        msg = 'Layer %s was uploaded but not expected' % layer.name
        assert layer.name in expected_layers, msg

        # Uncomment to reproduce issue #102
        # This may still also reproduce issue #40 for layer
        # tsunami_max_inundation_depth_bb_utm
        #check_layer(layer, full=True)

    # Every expected layer must have come back, be in Django and
    # be visible in GeoServer's REST listing
    for layer_name in expected_layers:
        msg = ('The following layer should have been uploaded '
               'but was not: %s' % layer_name)
        assert layer_name in layer_names, msg

        # Check the layer is in the Django database
        Layer.objects.get(name=layer_name)

        # Check that layer is in geoserver
        gs_username, gs_password = settings.GEOSERVER_CREDENTIALS
        page = get_web_page(os.path.join(settings.GEOSERVER_BASE_URL,
                                         'rest/layers'),
                            username=gs_username,
                            password=gs_password)
        found = False
        for line in page:
            if line.find('rest/layers/%s.html' % layer_name) > 0:
                found = True
        if not found:
            msg = ('Upload could not be verified, the layer %s is not '
                   'in geoserver %s, but GeoNode did not raise any errors, '
                   'this should never happen.'
                   % (layer_name, settings.GEOSERVER_BASE_URL))
            raise GeoNodeException(msg)

    server_url = settings.GEOSERVER_BASE_URL + 'ows?'
    # Verify that the GeoServer GetCapabilities record is accessible:
    metadata = get_metadata(server_url)
    msg = ('The metadata list should not be empty in server %s'
           % server_url)
    assert len(metadata) > 0, msg
def test_metadata_twice(self):
    """Layer metadata can be correctly uploaded multiple times

    Reproduces ticket #99 by copying test data to a unique temporary
    name, uploading it three times and verifying the metadata each time.
    """

    # This test reproduces ticket #99 by creating new data,
    # uploading twice and verifying metadata

    # Base test data
    filenames = ['Lembang_Earthquake_Scenario.asc',
                 'lembang_schools.shp']

    for org_filename in filenames:
        org_basename, ext = os.path.splitext(
            os.path.join(TESTDATA, org_filename))

        # Copy data to temporary unique name
        basename = unique_filename(dir='/tmp')

        # Best-effort copy of sidecar files (may be absent for some data)
        cmd = '/bin/cp %s.keywords %s.keywords' % (org_basename, basename)
        os.system(cmd)

        cmd = '/bin/cp %s.prj %s.prj' % (org_basename, basename)
        os.system(cmd)

        if ext == '.asc':
            layer_type = 'raster'
            filename = '%s.asc' % basename
            cmd = '/bin/cp %s.asc %s' % (org_basename, filename)
            os.system(cmd)
        elif ext == '.shp':
            layer_type = 'vector'
            filename = '%s.shp' % basename
            # Shapefiles are multi-file datasets; copy each component
            for e in ['shp', 'shx', 'sbx', 'sbn', 'dbf']:
                cmd = '/bin/cp %s.%s %s.%s' % (org_basename, e,
                                               basename, e)
                os.system(cmd)
        else:
            msg = ('Unknown layer extension in %s. '
                   'Expected .shp or .asc' % filename)
            raise Exception(msg)

        # Repeat multiple times
        for i in range(3):

            # Upload
            layer = save_to_geonode(filename, user=self.user,
                                    overwrite=True)

            # Get metadata
            layer_name = '%s:%s' % (layer.workspace, layer.name)
            metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

            # Verify
            assert 'id' in metadata
            assert 'title' in metadata
            assert 'layer_type' in metadata
            assert 'keywords' in metadata
            assert 'bounding_box' in metadata
            assert len(metadata['bounding_box']) == 4

            # Check integrity between Django layer and file
            assert_bounding_box_matches(layer, filename)

            # Check integrity between file and OWS metadata
            ref_bbox = get_bounding_box(filename)
            msg = ('Bounding box from OWS did not match bounding box '
                   'from file. They are\n'
                   'From file %s: %s\n'
                   'From OWS: %s' % (filename,
                                     ref_bbox,
                                     metadata['bounding_box']))
            assert numpy.allclose(metadata['bounding_box'], ref_bbox), msg

            assert layer.name == metadata['title']
            assert layer_name == metadata['id']
            assert layer_type == metadata['layer_type']

            # Check keywords expected for each layer type
            if layer_type == 'raster':
                category = 'hazard'
                subcategory = 'earthquake'
            elif layer_type == 'vector':
                category = 'exposure'
                subcategory = 'building'
            else:
                msg = 'Unknown layer type %s' % layer_type
                raise Exception(msg)

            keywords = metadata['keywords']

            msg = 'Did not find key "category" in keywords: %s' % keywords
            assert 'category' in keywords, msg

            msg = ('Did not find key "subcategory" in keywords: %s'
                   % keywords)
            assert 'subcategory' in keywords, msg

            msg = ('Category keyword %s did not match expected %s'
                   % (keywords['category'], category))
            assert category == keywords['category'], msg

            # BUG FIX: this message previously interpolated 'category'
            # instead of 'subcategory', producing misleading failure text.
            msg = ('Subcategory keyword %s did not match expected %s'
                   % (keywords['subcategory'], subcategory))
            assert subcategory == keywords['subcategory'], msg
def test_layer_upload(self):
    """Layers can be uploaded to local GeoNode
    """
    # Layers we deliberately skip during upload.
    # FIXME(Ole): 'kecamatan_prj.shp' is not really 'BAD', just in a
    # different projection (TM3_Zone_48-2), so it doubles as another
    # test for issue #40.
    BAD_LAYERS = ['grid_without_projection.asc',
                  'kecamatan_prj.shp']

    datadir = TESTDATA

    # Walk the data directory and predict which layer names
    # GeoNode should produce
    expected_layers = []
    not_expected_layers = []
    for root, dirs, files in os.walk(datadir):
        for filename in files:
            basename, extension = os.path.splitext(filename)
            if extension.lower() in LAYER_TYPES:
                # FIXME(Ole): GeoNode converts names to lower case
                name = unicode(basename.lower())
                if filename in BAD_LAYERS:
                    not_expected_layers.append(name)
                else:
                    expected_layers.append(name)

    # Upload the whole directory in one call
    layers = save_to_geonode(datadir, user=self.user, overwrite=True,
                             ignore=BAD_LAYERS)

    uploaded_names = [l.name for l in layers]

    # No unexpected layer may appear
    for layer in layers:
        msg = 'Layer %s was uploaded but not expected' % layer.name
        assert layer.name in expected_layers, msg

        # Uncomment to reproduce issue #102
        # This may still also reproduce issue #40 for layer
        # tsunami_max_inundation_depth_bb_utm
        #check_layer(layer, full=True)

    # Every expected layer must exist in the upload result,
    # in Django and in GeoServer's REST listing
    for layer_name in expected_layers:
        msg = ('The following layer should have been uploaded '
               'but was not: %s' % layer_name)
        assert layer_name in uploaded_names, msg

        # Check the layer is in the Django database
        Layer.objects.get(name=layer_name)

        # Check that layer is in geoserver
        gs_username, gs_password = settings.GEOSERVER_CREDENTIALS
        page = get_web_page(os.path.join(settings.GEOSERVER_BASE_URL,
                                         'rest/layers'),
                            username=gs_username,
                            password=gs_password)
        found = False
        for line in page:
            if line.find('rest/layers/%s.html' % layer_name) > 0:
                found = True
        if not found:
            msg = ('Upload could not be verified, the layer %s is not '
                   'in geoserver %s, but GeoNode did not raise any errors, '
                   'this should never happen.'
                   % (layer_name, settings.GEOSERVER_BASE_URL))
            raise GeoNodeException(msg)

    server_url = settings.GEOSERVER_BASE_URL + 'ows?'
    # Verify that the GeoServer GetCapabilities record is accessible:
    metadata = get_metadata(server_url)
    msg = ('The metadata list should not be empty in server %s'
           % server_url)
    assert len(metadata) > 0, msg
def test_keywords_download(self):
    """Keywords are downloaded from GeoServer along with layer data
    """
    # Upload test data
    filenames = ['Lembang_Earthquake_Scenario.asc',
                 'Padang_WGS84.shp',
                 'maumere_aos_depth_20m_land_wgs84.asc']
    layers = []
    paths = []
    for filename in filenames:
        path = os.path.join(TESTDATA, filename)

        # Upload to GeoNode
        layer = save_to_geonode(path, user=self.user, overwrite=True)

        # Record layer and file
        layers.append(layer)
        paths.append(path)

    # Check integrity of each uploaded layer against its source file
    for layer, path in zip(layers, paths):

        # Reference keyword dictionary read directly from the file
        ref_keywords = read_layer(path).get_keywords()

        # Keywords as reported by GeoServer metadata
        layer_name = '%s:%s' % (layer.workspace, layer.name)
        metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)
        assert 'keywords' in metadata
        geo_keywords = metadata['keywords']
        msg = ('Uploaded keywords were not as expected: I got %s '
               'but expected %s' % (geo_keywords, ref_keywords))
        for kw in ref_keywords:
            # Check that all keywords were uploaded
            # It is OK for new automatic keywords to have appeared
            # (e.g. resolution) - see issue #171
            assert kw in geo_keywords, msg
            assert ref_keywords[kw] == geo_keywords[kw], msg

        # Download the data again and verify keywords survived round trip
        bbox = get_bounding_box_string(path)
        H = download(INTERNAL_SERVER_URL, layer_name, bbox)
        dwn_keywords = H.get_keywords()

        msg = ('Downloaded keywords were not as expected: I got %s '
               'but expected %s' % (dwn_keywords, geo_keywords))
        assert geo_keywords == dwn_keywords, msg

        # Check that the layer and its .keyword file is there.
        msg = 'Downloaded layer %s was not found' % H.filename
        assert os.path.isfile(H.filename), msg

        kw_filename = os.path.splitext(H.filename)[0] + '.keywords'
        msg = 'Downloaded keywords file %s was not found' % kw_filename
        assert os.path.isfile(kw_filename), msg

        # Check that keywords are OK when reading downloaded file
        reread_keywords = read_layer(H.filename).get_keywords()
        msg = ('Keywords in downloaded file %s were not as expected: '
               'I got %s but expected %s'
               % (kw_filename, reread_keywords, geo_keywords))
        assert reread_keywords == geo_keywords, msg
def calculate(request, save_output=save_to_geonode):
    """Django view: run an impact calculation requested via HTTP POST.

    POST parameters (all required): impact_function, hazard_server,
    hazard, exposure_server, exposure, bbox, keywords.

    On failure, returns a JSON document with 'errors' and 'stacktrace'.
    NOTE(review): within this excerpt the success path ends at
    save_output() without an explicit return — confirm against the
    full file whether more of the view follows.
    """
    # Record start time so the output layer title is unique per run
    start = datetime.datetime.now()

    if request.method == 'GET':
        # FIXME: Add a basic form here to be able to generate the POST request.
        return HttpResponse('This should be accessed by robots, not humans.'
                            'In other words using HTTP POST instead of GET.')
    elif request.method == 'POST':
        data = request.POST
        impact_function_name = data['impact_function']
        hazard_server = data['hazard_server']
        hazard_layer = data['hazard']
        exposure_server = data['exposure_server']
        exposure_layer = data['exposure']
        requested_bbox = data['bbox']
        keywords = data['keywords']

        # Anonymous requests are attributed to a fallback user
        if request.user.is_anonymous():
            theuser = get_valid_user()
        else:
            theuser = request.user

        # Create entry in database (marked unsuccessful until proven otherwise)
        calculation = Calculation(user=theuser,
                                  run_date=start,
                                  hazard_server=hazard_server,
                                  hazard_layer=hazard_layer,
                                  exposure_server=exposure_server,
                                  exposure_layer=exposure_layer,
                                  impact_function=impact_function_name,
                                  success=False)

        # Wrap main computation loop in try except to catch and present
        # messages and stack traces in the application
        try:
            # Get metadata
            haz_metadata = get_metadata(hazard_server, hazard_layer)
            exp_metadata = get_metadata(exposure_server, exposure_layer)

            # Determine common resolution in case of raster layers
            raster_resolution = get_common_resolution(haz_metadata,
                                                      exp_metadata)

            # Get reconciled bounding boxes
            haz_bbox, exp_bbox, imp_bbox = get_bounding_boxes(haz_metadata,
                                                              exp_metadata,
                                                              requested_bbox)

            # Record layers to download
            download_layers = [(hazard_server, hazard_layer, haz_bbox),
                               (exposure_server, exposure_layer, exp_bbox)]

            # Add linked layers if any FIXME: STILL TODO!

            # Get selected impact function
            impact_function = get_plugin(impact_function_name)
            impact_function_source = inspect.getsource(impact_function)

            # Record information calculation object and save it
            calculation.impact_function_source = impact_function_source
            calculation.bbox = bboxlist2string(imp_bbox)
            calculation.save()

            # Start computation
            msg = 'Performing requested calculation'
            logger.info(msg)

            # Download selected layer objects
            layers = []
            for server, layer_name, bbox in download_layers:
                msg = ('- Downloading layer %s from %s'
                       % (layer_name, server))
                logger.info(msg)

                L = download(server, layer_name, bbox, raster_resolution)
                layers.append(L)

            # Calculate result using specified impact function
            msg = ('- Calculating impact using %s' % impact_function)
            logger.info(msg)

            impact_filename = calculate_impact(layers=layers,
                                               impact_fcn=impact_function)

            # Upload result to internal GeoServer
            msg = ('- Uploading impact layer %s' % impact_filename)
            logger.info(msg)
            result = save_output(impact_filename,
                                 title='output_%s' % start.isoformat(),
                                 user=theuser)
        except Exception, e:
            # FIXME: Reimplement error saving for calculation.
            # FIXME (Ole): Why should we reimplement?
            # This is dangerous. Try to raise an exception
            # e.g. in get_metadata_from_layer. Things will silently fail.
            # See issue #170
            logger.error(e)
            errors = e.__str__()
            trace = exception_format(e)
            calculation.errors = errors
            calculation.stacktrace = trace
            calculation.save()
            jsondata = json.dumps({'errors': errors,
                                   'stacktrace': trace})
            return HttpResponse(jsondata, mimetype='application/json')