def test_geotransform_from_geonode(self):
    """Geotransforms of GeoNode layers can be correctly determined
    """

    for ascii_file in ['lembang_mmi_hazmap.asc', 'test_grid.asc']:
        # Push the raster file up to GeoNode
        raster_path = os.path.join(TESTDATA, ascii_file)
        uploaded = save_to_geonode(raster_path, user=self.user)

        # Establish the reference geotransform straight from the file
        reference = read_layer(raster_path).get_geotransform()

        # Ask GeoNode's metadata endpoint for its view of the geotransform
        uploaded_name = uploaded.typename
        metadata = get_metadata(INTERNAL_SERVER_URL, uploaded_name)

        key = 'geotransform'
        msg = ('Could not find attribute "%s" in metadata. '
               'Values are: %s' % (key, metadata.keys()))
        assert key in metadata, msg

        reported = metadata[key]
        msg = ('Geotransform obtained from GeoNode for layer %s '
               'was not correct. I got %s but expected %s'
               '' % (uploaded_name, reported, reference))
        # Numerical comparison to tolerate floating point representation
        assert numpy.allclose(reference, reported), msg
def test_plugin_compatibility(self): """Default plugins perform as expected """ # Upload a raster and a vector data set hazard_filename = os.path.join(UNITDATA, 'hazard', 'jakarta_flood_design.tif') hazard_layer = save_to_geonode(hazard_filename) check_layer(hazard_layer, full=True) exposure_filename = os.path.join(UNITDATA, 'exposure', 'buildings_osm_4326.shp') exposure_layer = save_to_geonode(exposure_filename) check_layer(exposure_layer, full=True) # Test plugin_list = get_admissible_plugins() assert len(plugin_list) > 0 geoserver = {'url': settings.GEOSERVER_BASE_URL + 'ows', 'name': 'Local Geoserver', 'version': '1.0.0', 'id': 0} metadata = get_metadata(geoserver['url']) msg = 'There were no layers in test geoserver' assert len(metadata) > 0, msg # Characterisation test to preserve the behaviour of # get_layer_descriptors. FIXME: I think we should change this to be # a dictionary of metadata entries (ticket #126). reference = {'geonode:buildings_osm_4326': {'layertype': 'vector', 'category': 'exposure', 'subcategory': 'structure', 'title': 'buildings_osm_4326'}, 'geonode:jakarta_flood_like_2007_with_structural_improvements': {'layertype': 'raster', 'category': 'hazard', 'subcategory': 'flood', 'title': 'Jakarta flood like 2007 with structural improvements'} } for name, keywords in reference.items(): msg = 'Expected layer %s in %s' % (name, metadata.keys()) assert name in metadata.keys(), msg values = metadata[name]['keywords'] for key in keywords.keys(): refval = keywords[key] val = values[key] msg = ('Got value "%s" for key "%s" ' 'Expected "%s"' % (val, key, refval)) assert refval == val, msg plugins = get_admissible_plugins(keywords=reference.values()) msg = 'No compatible layers returned' assert len(plugins) > 0, msg
def calculate(request, save_output=save_file_to_geonode):
    """Run an impact calculation for the POSTed hazard/exposure layers.

    GET requests receive a plain-text hint only (the endpoint is meant
    to be driven via POST). POST requests must supply the fields
    'impact_function', 'hazard_server', 'hazard', 'exposure_server',
    'exposure', 'bbox' and 'keywords'.

    A Calculation record is created up front with success=False and
    updated as the computation proceeds. Any exception raised during
    the computation is caught, stored on the Calculation and returned
    to the client as JSON ({'errors': ..., 'stacktrace': ...}).
    """
    # Record arrival time so the Calculation row reflects when the
    # request came in, not when the heavy work started.
    start = datetime.datetime.now()

    if request.method == 'GET':
        # FIXME: Add a basic form here to be able to generate the POST request.
        # FIX: a separating space was missing between the sentences
        # (the response previously rendered as 'humans.In other words').
        return HttpResponse('This should be accessed by robots, not humans. '
                            'In other words using HTTP POST instead of GET.')
    elif request.method == 'POST':
        data = request.POST
        impact_function_name = data['impact_function']
        hazard_server = data['hazard_server']
        hazard_layer = data['hazard']
        exposure_server = data['exposure_server']
        exposure_layer = data['exposure']
        requested_bbox = data['bbox']
        keywords = data['keywords']

        # Anonymous requests are attributed to a default valid user
        if request.user.is_anonymous():
            theuser = get_valid_user()
        else:
            theuser = request.user

        # Create entry in database
        calculation = Calculation(user=theuser,
                                  run_date=start,
                                  hazard_server=hazard_server,
                                  hazard_layer=hazard_layer,
                                  exposure_server=exposure_server,
                                  exposure_layer=exposure_layer,
                                  impact_function=impact_function_name,
                                  success=False)

        # Wrap main computation loop in try except to catch and present
        # messages and stack traces in the application
        try:
            # Get metadata
            haz_metadata = get_metadata(hazard_server, hazard_layer)
            exp_metadata = get_metadata(exposure_server, exposure_layer)

            # Determine common resolution in case of raster layers
            raster_resolution = get_common_resolution(haz_metadata,
                                                      exp_metadata)

            # Get reconciled bounding boxes
            haz_bbox, exp_bbox, imp_bbox = get_bounding_boxes(haz_metadata,
                                                              exp_metadata,
                                                              requested_bbox)

            # Record layers to download
            download_layers = [(hazard_server, hazard_layer, haz_bbox),
                               (exposure_server, exposure_layer, exp_bbox)]

            # Add linked layers if any FIXME: STILL TODO!

            # Get selected impact function
            plugins = get_admissible_plugins()

            msg = ('Could not find "%s" in "%s"'
                   % (impact_function_name, plugins.keys()))
            assert impact_function_name in plugins, msg

            impact_function = plugins.get(impact_function_name)
            impact_function_source = inspect.getsource(impact_function)

            # Record information calculation object and save it
            calculation.impact_function_source = impact_function_source
            calculation.bbox = bboxlist2string(imp_bbox)
            calculation.save()

            # Start computation
            msg = 'Performing requested calculation'
            #logger.info(msg)

            # Download selected layer objects
            layers = []
            for server, layer_name, bbox in download_layers:
                msg = ('- Downloading layer %s from %s' % (layer_name,
                                                           server))
                #logger.info(msg)

                L = download(server, layer_name, bbox, raster_resolution)
                layers.append(L)

            # Calculate result using specified impact function
            msg = ('- Calculating impact using %s' % impact_function_name)
            #logger.info(msg)

            impact_file = calculate_impact(layers=layers,
                                           impact_fcn=impact_function)

            # Upload result to internal GeoServer
            msg = ('- Uploading impact layer %s' % impact_file.name)

            # Determine layer title for upload
            output_kw = impact_file.get_keywords()
            title = impact_file.get_name() + " using " + \
                    output_kw['hazard_title'] + \
                    " and " + output_kw['exposure_title']

            result = save_output(impact_file.filename,
                                 title=title,
                                 user=theuser, overwrite=False)
        # FIX: modernised Py2-only 'except Exception, e' to the
        # 'as' form (valid on Python 2.6+ and Python 3).
        except Exception as e:
            # FIXME: Reimplement error saving for calculation.
            # FIXME (Ole): Why should we reimplement?
            # This is dangerous. Try to raise an exception
            # e.g. in get_metadata_from_layer. Things will silently fail.
            # See issue #170
            #logger.error(e)

            errors = e.__str__()
            trace = exception_format(e)
            calculation.errors = errors
            calculation.stacktrace = trace
            calculation.save()
            jsondata = json.dumps({'errors': errors,
                                   'stacktrace': trace})
            return HttpResponse(jsondata, mimetype='application/json')
def questions(request):
    """Get a list of all the questions, layers and functions

    Will provide a list of plugin functions and the layers that the
    plugins will work with. Takes geoserver urls as a GET parameter,
    can have a comma separated list, e.g.
    http://127.0.0.1:8000/riab/api/v1/functions/?geoservers=http:...
    assumes version 1.0.0
    """

    if 'geoservers' in request.GET:
        # FIXME for the moment assume version 1.0.0
        gs = request.GET['geoservers'].split(',')
        geoservers = [{'url': g, 'version': '1.0.0'} for g in gs]
    else:
        geoservers = get_servers(request.user)

    # Pool layer metadata from all configured geoservers
    layers = {}
    functions = {}
    for geoserver in geoservers:
        layers.update(get_metadata(geoserver['url']))

    # Describe each admissible plugin: docstring plus optional attributes
    admissible_plugins = get_admissible_plugins()
    for name, f in admissible_plugins.items():
        functions[name] = {'doc': f.__doc__, }
        for key in ['author', 'title', 'rating']:
            if hasattr(f, key):
                functions[name][key] = getattr(f, key)

    output = {'layers': layers, 'functions': functions}

    hazards = []
    exposures = []
    # First get the list of all hazards and exposures
    for name, params in layers.items():
        keywords = params['keywords']
        if 'category' in keywords:
            if keywords['category'] == 'hazard':
                hazards.append(name)
            elif keywords['category'] == 'exposure':
                exposures.append(name)

    questions = []
    # Then iterate over hazards and exposures to find 3-tuples of
    # hazard, exposure and functions
    for hazard in hazards:
        for exposure in exposures:
            hazard_keywords = layers[hazard]['keywords']
            exposure_keywords = layers[exposure]['keywords']
            # NOTE(review): these assignments mutate the keyword dicts
            # stored in `layers` in place, so 'layertype' also appears in
            # output['layers'] when serialized below — presumably
            # intentional; confirm before refactoring.
            hazard_keywords['layertype'] = layers[hazard]['layertype']
            exposure_keywords['layertype'] = layers[exposure]['layertype']
            keywords = [hazard_keywords, exposure_keywords]
            plugins = get_admissible_plugins(keywords=keywords)
            for function in plugins:
                questions.append({'hazard': hazard,
                                  'exposure': exposure,
                                  'function': function})

    output['questions'] = questions
    jsondata = json.dumps(output)
    return HttpResponse(jsondata, mimetype='application/json')
}
# NOTE(review): this chunk is a fragment — the statement closed by the
# leading '}' and the enclosing function header are outside the visible
# source. Only comments are added here; code tokens are untouched.
output['raw'] = raw

# Collect the download links registered for the result layer
links = result.link_set.all()
links_dict = {}
for item in links:
    links_dict[item.name] = {'url': item.url,
                             'link_type': item.link_type,
                             'extension': item.extension}
output['links'] = links_dict

# Attach OWS metadata for the result layer
layer = get_metadata(ows_server_url, layer_name=result.typename)
output['layer'] = layer
output['caption'] = 'Calculation finished ' \
    'in %s' % calculation.run_duration

# Delete _state and _user_cache item from the dict,
# they were created automatically by Django
del output['_user_cache']
del output['_state']

# If success == True and errors = '' ...
# ... let's make errors=None for backwards compat
if output['success'] and len(output['errors']) == 0:
    output['errors'] = None
def test_keywords_download(self):
    """Keywords are downloaded from GeoServer along with layer data
    """

    # Upload test data
    # FIX: removed a dead assignment of ['padang_tsunami_mw8.tif'] that
    # was immediately overwritten by the list below.
    filenames = ['jakarta_flood_design.tif', ]
    layers = []
    paths = []
    for filename in filenames:
        basename, ext = os.path.splitext(filename)

        path = os.path.join(UNITDATA, 'hazard', filename)

        # Upload to GeoNode
        layer = save_to_geonode(path, user=self.user, overwrite=True)

        # Record layer and file
        layers.append(layer)
        paths.append(path)

    # Check integrity
    for i, layer in enumerate(layers):
        # Get reference keyword dictionary from file
        L = read_layer(paths[i])
        ref_keywords = L.get_keywords()

        # Get keywords metadata from GeoServer
        layer_name = '%s:%s' % (layer.workspace, layer.name)
        metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)
        assert 'keywords' in metadata
        geo_keywords = metadata['keywords']
        msg = ('Uploaded keywords were not as expected: I got %s '
               'but expected %s' % (geo_keywords, ref_keywords))
        for kw in ref_keywords:
            # Check that all keywords were uploaded
            # It is OK for new automatic keywords to have appeared
            # (e.g. resolution) - see issue #171
            assert kw in geo_keywords, msg
            assert ref_keywords[kw] == geo_keywords[kw], msg

        # Download data
        bbox = get_bounding_box_string(paths[i])
        H = download(INTERNAL_SERVER_URL, layer_name, bbox)

        dwn_keywords = H.get_keywords()
        msg = ('Downloaded keywords were not as expected: I got %s '
               'but expected %s' % (dwn_keywords, geo_keywords))
        assert geo_keywords == dwn_keywords, msg

        # Check that the layer and its .keyword file is there.
        msg = 'Downloaded layer %s was not found' % H.filename
        assert os.path.isfile(H.filename), msg

        kw_filename = os.path.splitext(H.filename)[0] + '.keywords'
        msg = 'Downloaded keywords file %s was not found' % kw_filename
        assert os.path.isfile(kw_filename), msg

        # Check that keywords are OK when reading downloaded file
        L = read_layer(H.filename)
        read_keywords = L.get_keywords()
        msg = ('Keywords in downloaded file %s were not as expected: '
               'I got %s but expected %s'
               % (kw_filename, read_keywords, geo_keywords))
        assert read_keywords == geo_keywords, msg
def test_metadata(self):
    """Metadata is retrieved correctly for both raster and vector data
    """

    # Upload test data
    filenames = [os.path.join('hazard', 'jakarta_flood_design.tif'),
                 os.path.join('exposure', 'buildings_osm_4326.shp')]
    layers = []
    paths = []
    for filename in filenames:
        path = os.path.join(UNITDATA, filename)
        layer = save_to_geonode(path, user=self.user, overwrite=True)

        # Record layer and file
        layers.append(layer)
        paths.append(path)

    # Check integrity
    for i, layer in enumerate(layers):
        # Derive expected layertype from the file extension
        if filenames[i].endswith('.shp'):
            layertype = 'vector'
        elif filenames[i].endswith('.tif'):
            layertype = 'raster'
        else:
            msg = ('Unknown layer extension in %s. '
                   'Expected .shp or .tif' % filenames[i])
            raise Exception(msg)

        layer_name = '%s:%s' % (layer.workspace, layer.name)
        metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

        # Mandatory metadata entries
        assert 'id' in metadata
        assert 'title' in metadata
        assert 'layertype' in metadata
        assert 'keywords' in metadata
        assert 'bounding_box' in metadata
        assert len(metadata['bounding_box']) == 4

        # Check integrity between Django layer and file
        assert_bounding_box_matches(layer, paths[i])

        # Check integrity between file and OWS metadata
        ref_bbox = get_bounding_box(paths[i])
        msg = ('Bounding box from OWS did not match bounding box '
               'from file. They are\n'
               'From file %s: %s\n'
               'From OWS: %s' % (paths[i], ref_bbox,
                                 metadata['bounding_box']))
        assert numpy.allclose(metadata['bounding_box'], ref_bbox), msg
        assert layer.title == metadata['title']
        assert layer_name == metadata['id']
        assert layertype == metadata['layertype']

        # Check keywords
        if layertype == 'raster':
            category = 'hazard'
            subcategory = 'flood'
        elif layertype == 'vector':
            category = 'exposure'
            subcategory = 'structure'
        else:
            msg = 'Unknown layer type %s' % layertype
            raise Exception(msg)

        keywords = metadata['keywords']
        msg = 'Did not find key "category" in keywords: %s' % keywords
        assert 'category' in keywords, msg
        msg = 'Did not find key "subcategory" in keywords: %s' % keywords
        assert 'subcategory' in keywords, msg

        msg = ('Category keyword %s did not match expected %s'
               % (keywords['category'], category))
        assert category == keywords['category'], msg

        # FIX: diagnostic previously formatted `category` as the
        # expected value here; it must be `subcategory`.
        msg = ('Subcategory keyword %s did not match expected %s'
               % (keywords['subcategory'], subcategory))
        assert subcategory == keywords['subcategory'], msg
def test_metadata_twice(self):
    """Layer metadata can be correctly uploaded multiple times
    """

    # This test reproduces ticket #99 by creating new data,
    # uploading twice and verifying metadata

    # Base test data
    filenames = ['jakarta_flood_design.tif', ]

    for org_filename in filenames:
        org_basename, ext = os.path.splitext(os.path.join(UNITDATA,
                                                          'hazard',
                                                          org_filename))

        # Copy data to temporary unique name
        basename = unique_filename(dir='/tmp')

        cmd = '/bin/cp -f %s.keywords %s.keywords' % (org_basename,
                                                      basename)
        os.system(cmd)

        # Not needed since we are dealing with a raster
        #cmd = '/bin/cp -f %s.prj %s.prj' % (org_basename, basename)
        #os.system(cmd)

        if ext == '.tif':
            layertype = 'raster'
            filename = '%s.tif' % basename
            cmd = '/bin/cp %s.tif %s' % (org_basename, filename)
            os.system(cmd)
        elif ext == '.shp':
            layertype = 'vector'
            filename = '%s.shp' % basename
            for e in ['shp', 'shx', 'sbx', 'sbn', 'dbf']:
                cmd = '/bin/cp %s.%s %s.%s' % (org_basename, e,
                                               basename, e)
                os.system(cmd)
        else:
            # FIX: previously referenced `filename`, which is unbound in
            # this branch (NameError) — use the loop variable instead.
            # Also corrected the stale message ('.asc' -> '.tif').
            msg = ('Unknown layer extension in %s. '
                   'Expected .shp or .tif' % org_filename)
            raise Exception(msg)

        # Repeat multiple times
        for i in range(3):
            # Upload
            layer = save_to_geonode(filename, user=self.user,
                                    overwrite=True)

            # Get metadata
            layer_name = '%s:%s' % (layer.workspace, layer.name)
            metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

            # Verify
            assert 'id' in metadata
            assert 'title' in metadata
            assert 'layertype' in metadata
            assert 'keywords' in metadata
            assert 'bounding_box' in metadata
            assert len(metadata['bounding_box']) == 4

            # Check integrity between Django layer and file
            assert_bounding_box_matches(layer, filename)

            # Check integrity between file and OWS metadata
            ref_bbox = get_bounding_box(filename)
            msg = ('Bounding box from OWS did not match bounding box '
                   'from file. They are\n'
                   'From file %s: %s\n'
                   'From OWS: %s' % (filename, ref_bbox,
                                     metadata['bounding_box']))
            assert numpy.allclose(metadata['bounding_box'],
                                  ref_bbox), msg
            assert layer.title == metadata['title']
            assert layer_name == metadata['id']
            assert layertype == metadata['layertype']

            # Check keywords
            if layertype == 'raster':
                category = 'hazard'
                subcategory = 'flood'
            else:
                msg = 'Unknown layer type %s' % layertype
                raise Exception(msg)

            keywords = metadata['keywords']
            msg = 'Did not find key "category" in keywords: %s' % keywords
            assert 'category' in keywords, msg
            msg = ('Did not find key "subcategory" in keywords: %s'
                   % keywords)
            assert 'subcategory' in keywords, msg

            msg = ('Category keyword %s did not match expected %s'
                   % (keywords['category'], category))
            assert category == keywords['category'], msg

            # FIX: diagnostic previously formatted `category` as the
            # expected value here; it must be `subcategory`.
            msg = ('Subcategory keyword %s did not match expected %s'
                   % (keywords['subcategory'], subcategory))
            assert subcategory == keywords['subcategory'], msg
def test_metadata(self):
    """Metadata is retrieved correctly for both raster and vector data
    """

    # Upload test data
    filenames = [os.path.join('hazard', 'jakarta_flood_design.tif'),
                 os.path.join('exposure', 'buildings_osm_4326.shp')]
    layers = []
    paths = []
    for filename in filenames:
        path = os.path.join(UNITDATA, filename)
        layer = save_to_geonode(path, user=self.user, overwrite=True)

        # Record layer and file
        layers.append(layer)
        paths.append(path)

    # Check integrity
    for i, layer in enumerate(layers):
        # Derive expected layertype from the file extension
        if filenames[i].endswith('.shp'):
            layertype = 'vector'
        elif filenames[i].endswith('.tif'):
            layertype = 'raster'
        else:
            msg = ('Unknown layer extension in %s. '
                   'Expected .shp or .tif' % filenames[i])
            raise Exception(msg)

        layer_name = '%s:%s' % (layer.workspace, layer.name)
        metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

        # Mandatory metadata entries
        assert 'id' in metadata
        assert 'title' in metadata
        assert 'layertype' in metadata
        assert 'keywords' in metadata
        assert 'bounding_box' in metadata
        assert len(metadata['bounding_box']) == 4

        # Check integrity between Django layer and file
        assert_bounding_box_matches(layer, paths[i])

        # Check integrity between file and OWS metadata
        ref_bbox = get_bounding_box(paths[i])
        msg = ('Bounding box from OWS did not match bounding box '
               'from file. They are\n'
               'From file %s: %s\n'
               'From OWS: %s' % (paths[i], ref_bbox,
                                 metadata['bounding_box']))
        assert numpy.allclose(metadata['bounding_box'], ref_bbox), msg
        assert layer.title == metadata['title']
        assert layer_name == metadata['id']
        assert layertype == metadata['layertype']

        # Check keywords
        if layertype == 'raster':
            category = 'hazard'
            subcategory = 'flood'
        elif layertype == 'vector':
            category = 'exposure'
            subcategory = 'structure'
        else:
            msg = 'Unknown layer type %s' % layertype
            raise Exception(msg)

        keywords = metadata['keywords']
        msg = 'Did not find key "category" in keywords: %s' % keywords
        assert 'category' in keywords, msg
        msg = 'Did not find key "subcategory" in keywords: %s' % keywords
        assert 'subcategory' in keywords, msg

        msg = ('Category keyword %s did not match expected %s'
               % (keywords['category'], category))
        assert category == keywords['category'], msg

        # FIX: diagnostic previously formatted `category` as the
        # expected value here; it must be `subcategory`.
        msg = ('Subcategory keyword %s did not match expected %s'
               % (keywords['subcategory'], subcategory))
        assert subcategory == keywords['subcategory'], msg
def test_keywords_download(self):
    """Keywords are downloaded from GeoServer along with layer data
    """

    # Upload test data
    # FIX: removed a dead assignment of ['padang_tsunami_mw8.tif'] that
    # was immediately overwritten by the list below.
    filenames = ['jakarta_flood_design.tif', ]
    layers = []
    paths = []
    for filename in filenames:
        basename, ext = os.path.splitext(filename)

        path = os.path.join(UNITDATA, 'hazard', filename)

        # Upload to GeoNode
        layer = save_to_geonode(path, user=self.user, overwrite=True)

        # Record layer and file
        layers.append(layer)
        paths.append(path)

    # Check integrity
    for i, layer in enumerate(layers):
        # Get reference keyword dictionary from file
        L = read_layer(paths[i])
        ref_keywords = L.get_keywords()

        # Get keywords metadata from GeoServer
        layer_name = '%s:%s' % (layer.workspace, layer.name)
        metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)
        assert 'keywords' in metadata
        geo_keywords = metadata['keywords']
        msg = ('Uploaded keywords were not as expected: I got %s '
               'but expected %s' % (geo_keywords, ref_keywords))
        for kw in ref_keywords:
            # Check that all keywords were uploaded
            # It is OK for new automatic keywords to have appeared
            # (e.g. resolution) - see issue #171
            assert kw in geo_keywords, msg
            assert ref_keywords[kw] == geo_keywords[kw], msg

        # Download data
        bbox = get_bounding_box_string(paths[i])
        H = download(INTERNAL_SERVER_URL, layer_name, bbox)

        dwn_keywords = H.get_keywords()
        msg = ('Downloaded keywords were not as expected: I got %s '
               'but expected %s' % (dwn_keywords, geo_keywords))
        assert geo_keywords == dwn_keywords, msg

        # Check that the layer and its .keyword file is there.
        msg = 'Downloaded layer %s was not found' % H.filename
        assert os.path.isfile(H.filename), msg

        kw_filename = os.path.splitext(H.filename)[0] + '.keywords'
        msg = 'Downloaded keywords file %s was not found' % kw_filename
        assert os.path.isfile(kw_filename), msg

        # Check that keywords are OK when reading downloaded file
        L = read_layer(H.filename)
        read_keywords = L.get_keywords()
        msg = ('Keywords in downloaded file %s were not as expected: '
               'I got %s but expected %s'
               % (kw_filename, read_keywords, geo_keywords))
        assert read_keywords == geo_keywords, msg
def test_metadata_twice(self):
    """Layer metadata can be correctly uploaded multiple times
    """

    # This test reproduces ticket #99 by creating new data,
    # uploading twice and verifying metadata

    # Base test data
    filenames = ['jakarta_flood_design.tif', ]

    for org_filename in filenames:
        org_basename, ext = os.path.splitext(os.path.join(UNITDATA,
                                                          'hazard',
                                                          org_filename))

        # Copy data to temporary unique name
        basename = unique_filename(dir='/tmp')

        cmd = '/bin/cp -f %s.keywords %s.keywords' % (org_basename,
                                                      basename)
        os.system(cmd)

        # Not needed since we are dealing with a raster
        #cmd = '/bin/cp -f %s.prj %s.prj' % (org_basename, basename)
        #os.system(cmd)

        if ext == '.tif':
            layertype = 'raster'
            filename = '%s.tif' % basename
            cmd = '/bin/cp %s.tif %s' % (org_basename, filename)
            os.system(cmd)
        elif ext == '.shp':
            layertype = 'vector'
            filename = '%s.shp' % basename
            for e in ['shp', 'shx', 'sbx', 'sbn', 'dbf']:
                cmd = '/bin/cp %s.%s %s.%s' % (org_basename, e,
                                               basename, e)
                os.system(cmd)
        else:
            # FIX: previously referenced `filename`, which is unbound in
            # this branch (NameError) — use the loop variable instead.
            # Also corrected the stale message ('.asc' -> '.tif').
            msg = ('Unknown layer extension in %s. '
                   'Expected .shp or .tif' % org_filename)
            raise Exception(msg)

        # Repeat multiple times
        for i in range(3):
            # Upload
            layer = save_to_geonode(filename, user=self.user,
                                    overwrite=True)

            # Get metadata
            layer_name = '%s:%s' % (layer.workspace, layer.name)
            metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

            # Verify
            assert 'id' in metadata
            assert 'title' in metadata
            assert 'layertype' in metadata
            assert 'keywords' in metadata
            assert 'bounding_box' in metadata
            assert len(metadata['bounding_box']) == 4

            # Check integrity between Django layer and file
            assert_bounding_box_matches(layer, filename)

            # Check integrity between file and OWS metadata
            ref_bbox = get_bounding_box(filename)
            msg = ('Bounding box from OWS did not match bounding box '
                   'from file. They are\n'
                   'From file %s: %s\n'
                   'From OWS: %s' % (filename, ref_bbox,
                                     metadata['bounding_box']))
            assert numpy.allclose(metadata['bounding_box'],
                                  ref_bbox), msg
            assert layer.title == metadata['title']
            assert layer_name == metadata['id']
            assert layertype == metadata['layertype']

            # Check keywords
            if layertype == 'raster':
                category = 'hazard'
                subcategory = 'flood'
            else:
                msg = 'Unknown layer type %s' % layertype
                raise Exception(msg)

            keywords = metadata['keywords']
            msg = 'Did not find key "category" in keywords: %s' % keywords
            assert 'category' in keywords, msg
            msg = ('Did not find key "subcategory" in keywords: %s'
                   % keywords)
            assert 'subcategory' in keywords, msg

            msg = ('Category keyword %s did not match expected %s'
                   % (keywords['category'], category))
            assert category == keywords['category'], msg

            # FIX: diagnostic previously formatted `category` as the
            # expected value here; it must be `subcategory`.
            msg = ('Subcategory keyword %s did not match expected %s'
                   % (keywords['subcategory'], subcategory))
            assert subcategory == keywords['subcategory'], msg
def test_plugin_compatibility(self): """Default plugins perform as expected """ # Upload a raster and a vector data set hazard_filename = os.path.join(UNITDATA, 'hazard', 'jakarta_flood_design.tif') hazard_layer = save_to_geonode(hazard_filename) check_layer(hazard_layer, full=True) exposure_filename = os.path.join(UNITDATA, 'exposure', 'buildings_osm_4326.shp') exposure_layer = save_to_geonode(exposure_filename) check_layer(exposure_layer, full=True) # Test plugin_list = get_admissible_plugins() assert len(plugin_list) > 0 geoserver = { 'url': settings.GEOSERVER_BASE_URL + 'ows', 'name': 'Local Geoserver', 'version': '1.0.0', 'id': 0 } metadata = get_metadata(geoserver['url']) msg = 'There were no layers in test geoserver' assert len(metadata) > 0, msg # Characterisation test to preserve the behaviour of # get_layer_descriptors. FIXME: I think we should change this to be # a dictionary of metadata entries (ticket #126). reference = { 'geonode:buildings_osm_4326': { 'layertype': 'vector', 'category': 'exposure', 'subcategory': 'structure', 'title': 'buildings_osm_4326' }, 'geonode:jakarta_flood_like_2007_with_structural_improvements': { 'layertype': 'raster', 'category': 'hazard', 'subcategory': 'flood', 'title': 'Jakarta flood like 2007 with structural improvements' } } for name, keywords in reference.items(): msg = 'Expected layer %s in %s' % (name, metadata.keys()) assert name in metadata.keys(), msg values = metadata[name]['keywords'] for key in keywords.keys(): refval = keywords[key] val = values[key] msg = ('Got value "%s" for key "%s" ' 'Expected "%s"' % (val, key, refval)) assert refval == val, msg plugins = get_admissible_plugins(keywords=reference.values()) msg = 'No compatible layers returned' assert len(plugins) > 0, msg
def calculate(request, save_output=save_file_to_geonode):
    """Run an impact calculation for the POSTed hazard/exposure layers.

    GET requests receive a plain-text hint only (the endpoint is meant
    to be driven via POST). POST requests must supply the fields
    'impact_function', 'hazard_server', 'hazard', 'exposure_server',
    'exposure', 'bbox' and 'keywords'.

    A Calculation record is created up front with success=False and
    updated as the computation proceeds. Any exception raised during
    the computation is caught, stored on the Calculation and returned
    to the client as JSON ({'errors': ..., 'stacktrace': ...}).
    """
    # Record arrival time so the Calculation row reflects when the
    # request came in, not when the heavy work started.
    start = datetime.datetime.now()

    if request.method == 'GET':
        # FIXME: Add a basic form here to be able to generate the POST request.
        # FIX: a separating space was missing between the sentences
        # (the response previously rendered as 'humans.In other words').
        return HttpResponse('This should be accessed by robots, not humans. '
                            'In other words using HTTP POST instead of GET.')
    elif request.method == 'POST':
        data = request.POST
        impact_function_name = data['impact_function']
        hazard_server = data['hazard_server']
        hazard_layer = data['hazard']
        exposure_server = data['exposure_server']
        exposure_layer = data['exposure']
        requested_bbox = data['bbox']
        keywords = data['keywords']

        # Anonymous requests are attributed to a default valid user
        if request.user.is_anonymous():
            theuser = get_valid_user()
        else:
            theuser = request.user

        # Create entry in database
        calculation = Calculation(user=theuser,
                                  run_date=start,
                                  hazard_server=hazard_server,
                                  hazard_layer=hazard_layer,
                                  exposure_server=exposure_server,
                                  exposure_layer=exposure_layer,
                                  impact_function=impact_function_name,
                                  success=False)

        # Wrap main computation loop in try except to catch and present
        # messages and stack traces in the application
        try:
            # Get metadata
            haz_metadata = get_metadata(hazard_server, hazard_layer)
            exp_metadata = get_metadata(exposure_server, exposure_layer)

            # Determine common resolution in case of raster layers
            raster_resolution = get_common_resolution(haz_metadata,
                                                      exp_metadata)

            # Get reconciled bounding boxes
            haz_bbox, exp_bbox, imp_bbox = get_bounding_boxes(haz_metadata,
                                                              exp_metadata,
                                                              requested_bbox)

            # Record layers to download
            download_layers = [(hazard_server, hazard_layer, haz_bbox),
                               (exposure_server, exposure_layer, exp_bbox)]

            # Add linked layers if any FIXME: STILL TODO!

            # Get selected impact function
            plugins = get_admissible_plugins()

            msg = ('Could not find "%s" in "%s"'
                   % (impact_function_name, plugins.keys()))
            assert impact_function_name in plugins, msg

            impact_function = plugins.get(impact_function_name)
            impact_function_source = inspect.getsource(impact_function)

            # Record information calculation object and save it
            calculation.impact_function_source = impact_function_source
            calculation.bbox = bboxlist2string(imp_bbox)
            calculation.save()

            # Start computation
            msg = 'Performing requested calculation'
            #logger.info(msg)

            # Download selected layer objects
            layers = []
            for server, layer_name, bbox in download_layers:
                msg = ('- Downloading layer %s from %s' % (layer_name,
                                                           server))
                #logger.info(msg)

                L = download(server, layer_name, bbox, raster_resolution)
                layers.append(L)

            # Calculate result using specified impact function
            msg = ('- Calculating impact using %s' % impact_function_name)
            #logger.info(msg)

            impact_file = calculate_impact(layers=layers,
                                           impact_fcn=impact_function)

            # Upload result to internal GeoServer
            msg = ('- Uploading impact layer %s' % impact_file.name)

            # Determine layer title for upload
            output_kw = impact_file.get_keywords()
            title = impact_file.get_name() + " using " + \
                    output_kw['hazard_title'] + \
                    " and " + output_kw['exposure_title']

            result = save_output(impact_file.filename,
                                 title=title,
                                 user=theuser, overwrite=False)
        # FIX: modernised Py2-only 'except Exception, e' to the
        # 'as' form (valid on Python 2.6+ and Python 3).
        except Exception as e:
            # FIXME: Reimplement error saving for calculation.
            # FIXME (Ole): Why should we reimplement?
            # This is dangerous. Try to raise an exception
            # e.g. in get_metadata_from_layer. Things will silently fail.
            # See issue #170
            #logger.error(e)

            errors = e.__str__()
            trace = exception_format(e)
            calculation.errors = errors
            calculation.stacktrace = trace
            calculation.save()
            jsondata = json.dumps({'errors': errors,
                                   'stacktrace': trace})
            return HttpResponse(jsondata, mimetype='application/json')
def questions(request):
    """Get a list of all the questions, layers and functions

    Will provide a list of plugin functions and the layers that the
    plugins will work with. Takes geoserver urls as a GET parameter,
    can have a comma separated list, e.g.
    http://127.0.0.1:8000/riab/api/v1/functions/?geoservers=http:...
    assumes version 1.0.0
    """

    if 'geoservers' in request.GET:
        # FIXME for the moment assume version 1.0.0
        gs = request.GET['geoservers'].split(',')
        geoservers = [{'url': g, 'version': '1.0.0'} for g in gs]
    else:
        geoservers = get_servers(request.user)

    # Pool layer metadata from all configured geoservers
    layers = {}
    functions = {}
    for geoserver in geoservers:
        layers.update(get_metadata(geoserver['url']))

    # Describe each admissible plugin: docstring plus optional attributes
    admissible_plugins = get_admissible_plugins()
    for name, f in admissible_plugins.items():
        functions[name] = {'doc': f.__doc__, }
        for key in ['author', 'title', 'rating']:
            if hasattr(f, key):
                functions[name][key] = getattr(f, key)

    output = {'layers': layers, 'functions': functions}

    hazards = []
    exposures = []
    # First get the list of all hazards and exposures
    for name, params in layers.items():
        keywords = params['keywords']
        if 'category' in keywords:
            if keywords['category'] == 'hazard':
                hazards.append(name)
            elif keywords['category'] == 'exposure':
                exposures.append(name)

    questions = []
    # Then iterate over hazards and exposures to find 3-tuples of
    # hazard, exposure and functions
    for hazard in hazards:
        for exposure in exposures:
            hazard_keywords = layers[hazard]['keywords']
            exposure_keywords = layers[exposure]['keywords']
            # NOTE(review): these assignments mutate the keyword dicts
            # stored in `layers` in place, so 'layertype' also appears in
            # output['layers'] when serialized below — presumably
            # intentional; confirm before refactoring.
            hazard_keywords['layertype'] = layers[hazard]['layertype']
            exposure_keywords['layertype'] = layers[exposure]['layertype']
            keywords = [hazard_keywords, exposure_keywords]
            plugins = get_admissible_plugins(keywords=keywords)
            for function in plugins:
                questions.append({'hazard': hazard,
                                  'exposure': exposure,
                                  'function': function})

    output['questions'] = questions
    jsondata = json.dumps(output)
    return HttpResponse(jsondata, mimetype='application/json')
# NOTE(review): this chunk is a fragment — the enclosing function header
# and the definitions of `raw`, `result`, `ows_server_url`, `calculation`
# and `output` are outside the visible source. Only comments are added.
output['raw'] = raw

# Collect the download links registered for the result layer
links = result.link_set.all()
links_dict = {}
for item in links:
    links_dict[item.name] = {'url': item.url,
                             'link_type': item.link_type,
                             'extension': item.extension}
output['links'] = links_dict

# Attach OWS metadata for the result layer
layer = get_metadata(ows_server_url, layer_name=result.typename)
output['layer'] = layer
output['caption'] = 'Calculation finished ' \
    'in %s' % calculation.run_duration

# Delete _state and _user_cache item from the dict,
# they were created automatically by Django
del output['_user_cache']
del output['_state']

# If success == True and errors = '' ...
# ... let's make errors=None for backwards compat
if output['success'] and len(output['errors']) == 0:
    output['errors'] = None