def test_plugin_compatibility(self):
    """Default plugins perform as expected """

    # Upload one raster and one vector data set to work against.
    raster_path = os.path.join(TESTDATA, 'shakemap_padang_20090930.asc')
    raster_layer = save_to_geonode(raster_path)
    check_layer(raster_layer, full=True)

    vector_path = os.path.join(TESTDATA, 'lembang_schools.shp')
    vector_layer = save_to_geonode(vector_path)
    check_layer(vector_layer, full=True)

    # Collect the registered plugins.
    plugin_list = get_plugins()
    assert len(plugin_list) > 0

    geoserver = {'url': settings.GEOSERVER_BASE_URL + 'ows',
                 'name': 'Local Geoserver',
                 'version': '1.0.0',
                 'id': 0}
    metadata = get_layer_descriptors(geoserver['url'])

    msg = 'There were no layers in test geoserver'
    assert len(metadata) > 0, msg

    # Characterisation test to preserve the behaviour of
    # get_layer_descriptors. FIXME: I think we should change this to be
    # a dictionary of metadata entries (ticket #126).
    reference = [['geonode:lembang_schools',
                  {'layer_type': 'feature',
                   'category': 'exposure',
                   'subcategory': 'building',
                   'title': 'lembang_schools'}],
                 ['geonode:shakemap_padang_20090930',
                  {'layer_type': 'raster',
                   'category': 'hazard',
                   'subcategory': 'earthquake',
                   'title': 'shakemap_padang_20090930'}]]

    for name, mdblock in reference:
        # Locate the entry for this layer name and compare every key.
        i = [x[0] for x in metadata].index(name)
        assert name == metadata[i][0]
        for key in mdblock:
            assert mdblock[key] == metadata[i][1][key]

    # Check that plugins are returned annotated with compatible layers.
    annotated_plugins = [{'name': name,
                          'doc': f.__doc__,
                          'layers': compatible_layers(f, metadata)}
                         for name, f in plugin_list.items()]

    msg = 'No compatible layers returned'
    assert len(annotated_plugins) > 0, msg
def test_requirements_check(self):
    """Plugins are correctly filtered based on requirements"""

    plugin_list = get_plugins('BasicFunction')
    assert len(plugin_list) == 1

    requirements = requirements_collect(plugin_list[0].values()[0])
    assert len(requirements) == 1, 'Requirements are %s' % requirements

    for req_str in requirements:
        # A matching metadata dict satisfies the requirement ...
        assert requirement_check({'category': 'hazard'},
                                 req_str) is True, 'Should eval to True'
        # ... and a non-matching one does not.
        assert requirement_check({'broke': 'broke'},
                                 req_str) is False, 'Should eval to False'

    # Looking up an unregistered plugin must raise AssertionError.
    try:
        plugin_list = get_plugins('NotRegistered')
    except AssertionError:
        pass
    else:
        raise Exception('Search should fail')
def test_requirements_check(self):
    """Plugins are correctly filtered based on requirements"""

    plugin_list = get_plugins('BasicFunction')
    assert len(plugin_list) == 1

    requirements = requirements_collect(plugin_list[0].values()[0])
    msg = 'Requirements are %s' % requirements
    assert len(requirements) == 1, msg

    for req_str in requirements:
        # Metadata matching the requirement evaluates to True,
        # metadata missing the required keys evaluates to False.
        ok = requirement_check({'category': 'hazard'}, req_str)
        assert ok is True, 'Should eval to True'

        bad = requirement_check({'broke': 'broke'}, req_str)
        assert bad is False, 'Should eval to False'

    # Requesting a plugin that was never registered must fail.
    try:
        plugin_list = get_plugins('NotRegistered')
    except AssertionError:
        pass
    else:
        msg = 'Search should fail'
        raise Exception(msg)
def functions(request):
    """Get a list of all the functions

    Will provide a list of plugin functions and the layers that
    the plugins will work with. Takes geoserver urls as a GET
    parameter can have a comma separated list

    e.g. http://127.0.0.1:8000/riab/api/v1/functions/?geoservers=http:...
    assumes version 1.0.0
    """

    plugin_list = get_plugins()

    if 'geoservers' in request.GET:
        # FIXME for the moment assume version 1.0.0
        geoservers = [{'url': url, 'version': '1.0.0'}
                      for url in request.GET['geoservers'].split(',')]
    else:
        geoservers = get_servers(request.user)

    # Iterate across all available geoservers and return every layer
    # and associated keywords
    layers_metadata = []
    for server in geoservers:
        layers_metadata.extend(
            get_layers_metadata(server['url'], server['version']))

    # For each plugin return all layers that meet the requirements
    # an empty layer is returned where the plugin cannot run
    annotated_plugins = [{'name': name,
                          'doc': f.__doc__,
                          'layers': compatible_layers(f, layers_metadata)}
                         for name, f in plugin_list.items()]

    jsondata = json.dumps({'functions': annotated_plugins})
    return HttpResponse(jsondata, mimetype='application/json')
def test_get_plugins(self):
    """Plugins can be collected """

    plugin_list = get_plugins()
    assert len(plugin_list) > 0

    # Metadata that every requirement line is checked against.
    md = {'category': 'hazard',
          'subcategory': 'earthquake',
          'layerType': 'raster'}

    # Check that every plugin has a requires line
    for plugin in plugin_list.values():
        requirements = requirements_collect(plugin)
        msg = 'There were no requirements in plugin %s' % plugin
        assert len(requirements) > 0, msg

        for req_str in requirements:
            msg = 'All plugins should return True or False'
            assert requirement_check(md, req_str) in [True, False], msg
def functions(request):
    """Get a list of all the functions

    Will provide a list of plugin functions and the layers that
    the plugins will work with. Takes geoserver urls as a GET
    parameter can have a comma separated list

    e.g. http://127.0.0.1:8000/riab/api/v1/functions/?geoservers=http:...
    assumes version 1.0.0
    """

    plugin_list = get_plugins()

    if 'geoservers' in request.GET:
        # FIXME for the moment assume version 1.0.0
        urls = request.GET['geoservers'].split(',')
        geoservers = [{'url': url, 'version': '1.0.0'} for url in urls]
    else:
        geoservers = get_servers(request.user)

    # Iterate across all available geoservers and return all
    # layer descriptors for use with the plugin subsystem
    layer_descriptors = []
    for server in geoservers:
        layer_descriptors.extend(get_layer_descriptors(server['url']))

    # For each plugin return all layers that meet the requirements
    # an empty layer is returned where the plugin cannot run
    annotated_plugins = []
    for name, f in plugin_list.items():
        annotated_plugins.append(
            {'name': name,
             'doc': f.__doc__,
             'layers': compatible_layers(f, layer_descriptors)})

    jsondata = json.dumps({'functions': annotated_plugins})
    return HttpResponse(jsondata, mimetype='application/json')
def debug(request):
    """Show a list of all the functions"""

    plugin_list = get_plugins()

    # FIX: replaced the non-idiomatic `not 'doc' in ...` test and the
    # duplicated dict literals with a single construction that adds the
    # optional 'doc' key when the ?doc GET parameter is present.
    plugins_info = []
    for name, f in plugin_list.items():
        info = {'name': name,
                'location': f.__module__}
        if 'doc' in request.GET:
            info['doc'] = f.__doc__
        plugins_info.append(info)

    output = {'plugins': plugins_info}
    jsondata = json.dumps(output)
    return HttpResponse(jsondata, mimetype='application/json')
def test_get_plugins(self):
    """Plugins can be collected """

    plugin_list = get_plugins()
    assert len(plugin_list) > 0

    # Check that every plugin has a requires line
    for plugin in plugin_list.values():
        requirements = requirements_collect(plugin)
        msg = 'There were no requirements in plugin %s' % plugin
        assert len(requirements) > 0, msg

        for req_str in requirements:
            # requirement_check must always yield a boolean verdict.
            verdict = requirement_check({'category': 'hazard',
                                         'subcategory': 'earthquake',
                                         'layerType': 'raster'},
                                        req_str)
            msg = 'All plugins should return True or False'
            assert verdict in [True, False], msg
def test_plugin_compatability(self):
    """Performance of the default plugins using internal GeoServer """

    plugin_list = get_plugins()
    assert len(plugin_list) > 0

    geoserver = {'url': settings.GEOSERVER_BASE_URL + 'ows',
                 'name': 'Local Geoserver',
                 'version': '1.0.0',
                 'id': 0}
    layers = get_layers_metadata(geoserver['url'],
                                 geoserver['version'])

    msg = 'There were no layers in test geoserver'
    assert len(layers) > 0, msg

    # Annotate every plugin with the layers it is compatible with.
    annotated_plugins = []
    for name, f in plugin_list.items():
        annotated_plugins.append({'name': name,
                                  'doc': f.__doc__,
                                  'layers': compatible_layers(f, layers)})

    msg = 'No compatible layers returned'
    assert len(annotated_plugins) > 0, msg
def calculate(request, save_output=save_to_geonode):
    """Run an impact calculation for a POSTed hazard/exposure pair.

    Expects POST parameters: impact_function, hazard_server, hazard,
    exposure_server, exposure, bbox and keywords.  Records the run in a
    Calculation database entry; on failure the errors and stack trace
    are stored and returned as JSON.
    """
    start = datetime.datetime.now()

    if request.method == 'GET':
        # FIXME: Add a basic form here to be able to generate the POST request.
        # FIX: added the missing space between the two concatenated string
        # literals (previously rendered as "humans.In other words").
        return HttpResponse('This should be accessed by robots, not humans. '
                            'In other words using HTTP POST instead of GET.')
    elif request.method == 'POST':
        data = request.POST
        impact_function_name = data['impact_function']
        hazard_server = data['hazard_server']
        hazard_layer = data['hazard']
        exposure_server = data['exposure_server']
        exposure_layer = data['exposure']
        bbox = data['bbox']
        # NOTE(review): 'keywords' is read but not used below; the lookup
        # still enforces that the parameter is present — kept on purpose.
        keywords = data['keywords']

        if request.user.is_anonymous():
            theuser = get_valid_user()
        else:
            theuser = request.user

        # Create entry in database
        calculation = Calculation(user=theuser,
                                  run_date=start,
                                  hazard_server=hazard_server,
                                  hazard_layer=hazard_layer,
                                  exposure_server=exposure_server,
                                  exposure_layer=exposure_layer,
                                  impact_function=impact_function_name,
                                  success=False)
        try:
            # Input checks
            msg = 'This cannot happen :-)'
            assert isinstance(bbox, basestring), msg
            check_bbox_string(bbox)

            # Find the intersection of bounding boxes for viewport,
            # hazard and exposure.
            vpt_bbox = bboxstring2list(bbox)
            haz_bbox = get_metadata(hazard_server,
                                    hazard_layer)['bounding_box']
            exp_bbox = get_metadata(exposure_server,
                                    exposure_layer)['bounding_box']

            # Impose minimum bounding box size (as per issue #101).
            # FIXME (Ole): This will need to be revisited in conjunction with
            # raster resolutions at some point.
            min_res = 0.00833334
            eps = 1.0e-1
            vpt_bbox = minimal_bounding_box(vpt_bbox, min_res, eps=eps)
            haz_bbox = minimal_bounding_box(haz_bbox, min_res, eps=eps)
            exp_bbox = minimal_bounding_box(exp_bbox, min_res, eps=eps)

            # New bounding box for data common to hazard, exposure and viewport
            # Download only data within this intersection
            intersection = bbox_intersection(vpt_bbox, haz_bbox, exp_bbox)
            if intersection is None:
                # Bounding boxes did not overlap
                msg = ('Bounding boxes of hazard data, exposure data and '
                       'viewport did not overlap, so no computation was '
                       'done. Please try again.')
                logger.info(msg)
                raise Exception(msg)
            bbox = bboxlist2string(intersection)

            plugin_list = get_plugins(impact_function_name)
            _, impact_function = plugin_list[0].items()[0]
            impact_function_source = inspect.getsource(impact_function)

            calculation.impact_function_source = impact_function_source
            calculation.bbox = bbox
            calculation.save()

            msg = 'Performing requested calculation'
            logger.info(msg)

            # Download selected layer objects
            msg = ('- Downloading hazard layer %s from %s'
                   % (hazard_layer, hazard_server))
            logger.info(msg)
            H = download(hazard_server, hazard_layer, bbox)

            msg = ('- Downloading exposure layer %s from %s'
                   % (exposure_layer, exposure_server))
            logger.info(msg)
            E = download(exposure_server, exposure_layer, bbox)

            # Calculate result using specified impact function
            msg = ('- Calculating impact using %s' % impact_function)
            logger.info(msg)
            impact_filename = calculate_impact(layers=[H, E],
                                               impact_fcn=impact_function)

            # Upload result to internal GeoServer
            msg = ('- Uploading impact layer %s' % impact_filename)
            logger.info(msg)
            result = save_output(impact_filename,
                                 title='output_%s' % start.isoformat(),
                                 user=theuser)
        except Exception as e:
            # FIXME: Reimplement error saving for calculation
            # FIX: modernised `except Exception, e` to the `as` form
            # (valid on Python 2.6+) and replaced e.__str__() with str(e).
            logger.error(e)
            errors = str(e)
            trace = exception_format(e)
            calculation.errors = errors
            calculation.stacktrace = trace
            calculation.save()
            jsondata = json.dumps({'errors': errors, 'stacktrace': trace})
            return HttpResponse(jsondata, mimetype='application/json')
def calculate(request, save_output=dummy_save):
    """Run an impact calculation for a POSTed hazard/exposure pair.

    Expects POST parameters: impact_function, hazard_server, hazard,
    exposure_server, exposure, bbox and keywords.  Records the run in a
    Calculation entry and returns the calculation record as JSON.
    """
    start = datetime.datetime.now()

    if request.method == 'GET':
        # FIXME: Add a basic form here to be able to generate the POST request.
        # FIX: added the missing space between the two concatenated string
        # literals (previously rendered as "humans.In other words").
        return HttpResponse('This should be accessed by robots, not humans. '
                            'In other words using HTTP POST instead of GET.')
    elif request.method == 'POST':
        data = request.POST
        impact_function_name = data['impact_function']
        hazard_server = data['hazard_server']
        hazard_layer = data['hazard']
        exposure_server = data['exposure_server']
        exposure_layer = data['exposure']
        bbox = data['bbox']
        # NOTE(review): 'keywords' is read but not used below; the lookup
        # still enforces that the parameter is present — kept on purpose.
        keywords = data['keywords']

    theuser = get_guaranteed_valid_user(request.user)

    plugin_list = get_plugins(impact_function_name)
    _, impact_function = plugin_list[0].items()[0]
    impact_function_source = inspect.getsource(impact_function)

    # Create entry in database.
    # FIX: previously the string literals 'exposure_server' and
    # 'exposure_layer' were stored instead of the submitted values.
    calculation = Calculation(user=theuser,
                              run_date=start,
                              hazard_server=hazard_server,
                              hazard_layer=hazard_layer,
                              exposure_server=exposure_server,
                              exposure_layer=exposure_layer,
                              impact_function=impact_function_name,
                              impact_function_source=impact_function_source,
                              bbox=bbox,
                              success=False)
    calculation.save()

    logger.info('Performing requested calculation')

    # Download selected layer objects
    logger.info('- Downloading hazard layer %s from %s'
                % (hazard_layer, hazard_server))
    H = download(hazard_server, hazard_layer, bbox)

    logger.info('- Downloading exposure layer %s from %s'
                % (exposure_layer, exposure_server))
    E = download(exposure_server, exposure_layer, bbox)

    # Calculate result using specified impact function
    logger.info('- Calculating impact using %s' % impact_function)
    impact_filename = calculate_impact(layers=[H, E],
                                       impact_function=impact_function)

    # Upload result to internal GeoServer
    logger.info('- Uploading impact layer %s' % impact_filename)
    result = save_output(impact_filename,
                         title='output_%s' % start.isoformat(),
                         user=theuser)
    logger.info('- Result available at %s.' % result.get_absolute_url())

    calculation.layer = result.get_absolute_url()
    calculation.success = True
    calculation.save()

    output = calculation.__dict__

    # json.dumps does not like datetime objects,
    # let's make it a json string ourselves
    output['run_date'] = 'new Date("%s")' % calculation.run_date

    # FIXME: This should not be needed in an ideal world
    # FIX: dropped the stray trailing comma that made ows_server_url a
    # one-element tuple instead of the URL string.
    ows_server_url = settings.GEOSERVER_BASE_URL + 'ows'
    output['ows_server_url'] = ows_server_url

    # json.dumps does not like django users
    output['user'] = calculation.user.username

    # Delete _state and _user_cache item from the dict,
    # they were created automatically by Django
    del output['_user_cache']
    del output['_state']

    jsondata = json.dumps(output)
    return HttpResponse(jsondata, mimetype='application/json')
def test_plugin_compatibility(self):
    """Default plugins perform as expected """

    # Upload a raster and a vector data set
    for basename in ('shakemap_padang_20090930.asc', 'lembang_schools.shp'):
        layer = save_to_geonode(os.path.join(TESTDATA, basename))
        check_layer(layer, full=True)

    # Test
    plugin_list = get_plugins()
    assert len(plugin_list) > 0

    geoserver = {'url': settings.GEOSERVER_BASE_URL + 'ows',
                 'name': 'Local Geoserver',
                 'version': '1.0.0',
                 'id': 0}
    metadata = get_layer_descriptors(geoserver['url'])
    assert len(metadata) > 0, 'There were no layers in test geoserver'

    # Characterisation test to preserve the behaviour of
    # get_layer_descriptors. FIXME: I think we should change this to be
    # a dictionary of metadata entries (ticket #126).
    reference = [['geonode:lembang_schools',
                  {'layer_type': 'vector',
                   'category': 'exposure',
                   'subcategory': 'building',
                   'title': 'lembang_schools'}],
                 ['geonode:shakemap_padang_20090930',
                  {'layer_type': 'raster',
                   'category': 'hazard',
                   'subcategory': 'earthquake',
                   'title': 'shakemap_padang_20090930'}]]

    names = [x[0] for x in metadata]
    for name, mdblock in reference:
        i = names.index(name)
        msg = 'Got name %s, expected %s' % (name, metadata[i][0])
        assert name == metadata[i][0], msg

        for key, refval in mdblock.items():
            val = metadata[i][1][key]
            msg = ('Got value "%s" for key "%s" '
                   'Expected "%s"' % (val, key, refval))
            assert refval == val, msg

    # Check plugins are returned annotated with their compatible layers.
    annotated_plugins = [{'name': name,
                          'doc': f.__doc__,
                          'layers': compatible_layers(f, metadata)}
                         for name, f in plugin_list.items()]
    assert len(annotated_plugins) > 0, 'No compatible layers returned'