def availableFunctions(theKeywordList=None):
    """Query the inasafe engine to see what plugins are available.

    Args:
        theKeywordList - an optional parameter which should contain a
        list of 2 dictionaries (the number of items in the list is not
        enforced). The dictionaries should be obtained by using
        readKeywordsFromFile e.g.::

            myFile1 = foo.shp
            myFile2 = bar.asc
            myKeywords1 = readKeywordsFromFile(myFile1)
            myKeywords2 = readKeywordsFromFile(myFile2)
            myList = [myKeywords1, myKeywords2]
            myFunctions = availableFunctions(myList)

    Returns:
        A dictionary of strings where each is a plugin name.

    .. note:: If theKeywordList is not provided, all available plugins
        will be returned in the list.

    Raises:
        NoFunctionsFoundError if no functions are found.

    .. note:: The empty-result check that raised NoFunctionsFoundError
        was previously commented out, so currently an empty dict is
        returned instead; the docstring claim is kept for when the
        check is reinstated.
    """
    # The previous bare ``try: ... except: raise`` wrapper was a no-op
    # (it re-raised everything unchanged), so the engine is now called
    # directly and any error propagates to the caller as before.
    return get_admissible_plugins(theKeywordList)
def availableFunctions(theKeywordList=None):
    """Query the inasafe engine to see what plugins are available.

    Args:
        theKeywordList - an optional parameter which should contain a
        list of 2 dictionaries (the number of items in the list is not
        enforced). The dictionaries should be obtained by using
        readKeywordsFromFile e.g.::

            myFile1 = foo.shp
            myFile2 = bar.asc
            keywords1 = readKeywordsFromFile(myFile1)
            keywords2 = readKeywordsFromFile(myFile2)
            myList = [keywords1, keywords2]
            myFunctions = availableFunctions(myList)

    Returns:
        A dictionary of strings where each is a plugin name.

    .. note:: If theKeywordList is not provided, all available plugins
        will be returned in the list.

    Raises:
        NoFunctionsFoundError if no functions are found.

    .. note:: The empty-result check that raised NoFunctionsFoundError
        was previously commented out, so currently an empty dict is
        returned instead; the docstring claim is kept for when the
        check is reinstated.
    """
    # The previous bare ``try: ... except: raise`` wrapper was a no-op
    # (it re-raised everything unchanged), so the engine is now called
    # directly and any error propagates to the caller as before.
    return get_admissible_plugins(theKeywordList)
def debug(request):
    """Show a list of all the functions.

    Args:
        request - Django HttpRequest. If the GET parameters include a
        'doc' key, each plugin's docstring is included in the output.

    Returns:
        HttpResponse whose body is a JSON object with a single
        'plugins' key: a list of dicts with 'name', 'location' and
        (when requested) 'doc' for each admissible plugin.
    """
    plugin_list = get_admissible_plugins()
    # Decide once whether docstrings were requested instead of testing
    # request.GET on every iteration; this also removes the duplicated
    # dict construction the two branches previously carried.
    include_doc = 'doc' in request.GET
    plugins_info = []
    for name, f in plugin_list.items():
        info = {'name': name,
                'location': f.__module__}
        if include_doc:
            info['doc'] = f.__doc__
        plugins_info.append(info)
    output = {'plugins': plugins_info}
    jsondata = json.dumps(output)
    return HttpResponse(jsondata, mimetype='application/json')
def calculate(request, save_output=save_file_to_geonode):
    """Run an InaSAFE impact calculation requested over HTTP POST.

    Args:
        request - Django HttpRequest. POST parameters read:
            impact_function, hazard_server, hazard, exposure_server,
            exposure, bbox and keywords.
        save_output - callable used to persist the computed impact
            layer (defaults to save_file_to_geonode).

    Returns:
        On GET: an HttpResponse telling the caller to use POST.
        On POST failure: a JSON HttpResponse with 'errors' and
        'stacktrace' keys.

    .. note:: NOTE(review): as visible here the successful POST path
        falls through without an explicit return -- presumably the
        success response is built further down in code not shown;
        TODO confirm.
    """
    start = datetime.datetime.now()
    if request.method == 'GET':
        # FIXME: Add a basic form here to be able to generate the POST request.
        return HttpResponse('This should be accessed by robots, not humans.'
                            'In other words using HTTP POST instead of GET.')
    elif request.method == 'POST':
        data = request.POST
        impact_function_name = data['impact_function']
        hazard_server = data['hazard_server']
        hazard_layer = data['hazard']
        exposure_server = data['exposure_server']
        exposure_layer = data['exposure']
        requested_bbox = data['bbox']
        # NOTE(review): 'keywords' is read but never used in the code
        # visible here.
        keywords = data['keywords']

        if request.user.is_anonymous():
            theuser = get_valid_user()
        else:
            theuser = request.user

        # Create entry in database; success=False until the run completes
        # so aborted calculations remain recorded as failures.
        calculation = Calculation(user=theuser,
                                  run_date=start,
                                  hazard_server=hazard_server,
                                  hazard_layer=hazard_layer,
                                  exposure_server=exposure_server,
                                  exposure_layer=exposure_layer,
                                  impact_function=impact_function_name,
                                  success=False)

        # Wrap main computation loop in try except to catch and present
        # messages and stack traces in the application
        try:
            # Get metadata for both layers from their servers
            haz_metadata = get_metadata(hazard_server, hazard_layer)
            exp_metadata = get_metadata(exposure_server, exposure_layer)

            # Determine common resolution in case of raster layers
            raster_resolution = get_common_resolution(haz_metadata,
                                                      exp_metadata)

            # Get reconciled bounding boxes for hazard, exposure and impact
            haz_bbox, exp_bbox, imp_bbox = get_bounding_boxes(haz_metadata,
                                                              exp_metadata,
                                                              requested_bbox)

            # Record layers to download
            download_layers = [(hazard_server, hazard_layer, haz_bbox),
                               (exposure_server, exposure_layer, exp_bbox)]

            # Add linked layers if any FIXME: STILL TODO!

            # Get selected impact function
            plugins = get_admissible_plugins()
            msg = ('Could not find "%s" in "%s"' % (
                impact_function_name, plugins.keys()))
            assert impact_function_name in plugins, msg
            impact_function = plugins.get(impact_function_name)
            impact_function_source = inspect.getsource(impact_function)

            # Record information calculation object and save it
            calculation.impact_function_source = impact_function_source
            calculation.bbox = bboxlist2string(imp_bbox)
            calculation.save()

            # Start computation
            msg = 'Performing requested calculation'
            #logger.info(msg)

            # Download selected layer objects at the reconciled bboxes
            layers = []
            for server, layer_name, bbox in download_layers:
                msg = ('- Downloading layer %s from %s' % (layer_name,
                                                           server))
                #logger.info(msg)
                L = download(server, layer_name, bbox, raster_resolution)
                layers.append(L)

            # Calculate result using specified impact function
            msg = ('- Calculating impact using %s' % impact_function_name)
            #logger.info(msg)
            impact_file = calculate_impact(layers=layers,
                                           impact_fcn=impact_function)

            # Upload result to internal GeoServer
            msg = ('- Uploading impact layer %s' % impact_file.name)
            # Determine layer title for upload from the output keywords
            output_kw = impact_file.get_keywords()
            title = impact_file.get_name() + " using " + \
                output_kw['hazard_title'] + \
                " and " + output_kw['exposure_title']
            result = save_output(impact_file.filename,
                                 title=title,
                                 user=theuser,
                                 overwrite=False)
        except Exception, e:
            # FIXME: Reimplement error saving for calculation.
            # FIXME (Ole): Why should we reimplement?
            # This is dangerous. Try to raise an exception
            # e.g. in get_metadata_from_layer. Things will silently fail.
            # See issue #170
            #logger.error(e)
            # Persist the failure details on the Calculation record and
            # return them to the client as JSON.
            errors = e.__str__()
            trace = exception_format(e)
            calculation.errors = errors
            calculation.stacktrace = trace
            calculation.save()
            jsondata = json.dumps({'errors': errors, 'stacktrace': trace})
            return HttpResponse(jsondata, mimetype='application/json')
def questions(request):
    """Get a list of all the questions, layers and functions

    Will provide a list of plugin functions and the layers that the
    plugins will work with. Takes geoserver urls as a GET parameter
    can have a comma separated list

    e.g. http://127.0.0.1:8000/riab/api/v1/functions/?geoservers=http:...
    assumes version 1.0.0
    """
    if 'geoservers' in request.GET:
        # FIXME for the moment assume version 1.0.0
        urls = request.GET['geoservers'].split(',')
        geoservers = [{'url': url, 'version': '1.0.0'} for url in urls]
    else:
        geoservers = get_servers(request.user)

    # Merge layer metadata from every geoserver into one mapping.
    layers = {}
    for server in geoservers:
        layers.update(get_metadata(server['url']))

    # Describe every admissible plugin function.
    functions = {}
    for plugin_name, plugin in get_admissible_plugins().items():
        entry = {'doc': plugin.__doc__}
        for attribute in ('author', 'title', 'rating'):
            if hasattr(plugin, attribute):
                entry[attribute] = getattr(plugin, attribute)
        functions[plugin_name] = entry

    output = {'layers': layers, 'functions': functions}

    # Partition the layers into hazards and exposures by their
    # 'category' keyword; layers without one are ignored.
    hazards = []
    exposures = []
    for layer_name, params in layers.items():
        category = params['keywords'].get('category')
        if category == 'hazard':
            hazards.append(layer_name)
        elif category == 'exposure':
            exposures.append(layer_name)

    # Combine each hazard/exposure pair with every plugin admissible for
    # that pair to form the 3-tuples of askable questions.
    questions = []
    for hazard in hazards:
        for exposure in exposures:
            hazard_keywords = layers[hazard]['keywords']
            exposure_keywords = layers[exposure]['keywords']
            # Record layertype next to the other keywords (this mutates
            # the entries in 'layers' in place, as before).
            hazard_keywords['layertype'] = layers[hazard]['layertype']
            exposure_keywords['layertype'] = layers[exposure]['layertype']
            admissible = get_admissible_plugins(
                keywords=[hazard_keywords, exposure_keywords])
            for function in admissible:
                questions.append({'hazard': hazard,
                                  'exposure': exposure,
                                  'function': function})

    output['questions'] = questions
    jsondata = json.dumps(output)
    return HttpResponse(jsondata, mimetype='application/json')
def get(self): # try: exposure_id = self.get_argument('e') hazard_id = self.get_argument('h') impact_name = 'impact-e%s-h%s' % (exposure_id, hazard_id) if exposure_id and hazard_id: # First check if the impact already exists in the cache try: # try to connect to the redis cache redis_server = redis.Redis() cache = True print 'Successfully connected to redis!' except: # This is just a flag that will be used later on print "I couldn't connect to redis" cache = False else: # If the impact exists, get it from the cache and return if redis_server.exists(impact_name): print 'Entry exists in cache!' writeout = redis_server.get(impact_name) self.set_header('Content-Type', 'application/javascript') self.write(writeout) return # Query the db and calculate if it doesn't try: #try connecting to the pg database conn = psycopg2.connect( "dbname='dev' user='******' password='******'") print 'Successfully connected to postgres!' except: writeout = 'Could not connect to the database!' else: # create a cursor cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor) try: #1. 
Query the db for the layers query = 'SELECT shapefile FROM layers'+\ ' WHERE id = %s' % exposure_id cursor.execute(query) exposure = cursor.fetchone() query = 'SELECT shapefile FROM layers'+\ ' WHERE id = %s' % hazard_id cursor.execute(query) hazard = cursor.fetchone() except: writeout = 'There was something wrong with your query' conn.rollback() else: if exposure and hazard: # Pass the shapefile (paths) to read_layer try: exposure_layer = read_layer(exposure['shapefile']) hazard_layer = read_layer(hazard['shapefile']) except: writeout = 'Something went wrong when reading the layers' # Keywords exposure_dict = exposure_layer.get_keywords() hazard_dict = hazard_layer.get_keywords() if exposure_layer.is_vector: exposure_dict['layertype'] = 'vector' else: exposure_dict['layertype'] = 'raster' if hazard_layer.is_vector: hazard_dict['layertype'] = 'vector' else: exposure_dict['layertype'] = 'raster' #get optimal bounding box common_bbox = bbox_intersection( exposure_layer.get_bounding_box(), hazard_layer.get_bounding_box()) print exposure_layer.get_bounding_box() print hazard_layer.get_bounding_box() bbox_string = '' try: for val in common_bbox: bbox_string += str(val) + ' ' except: writeout = 'The layers have no intersection!' else: #gdal clip dest = 'hazard_tmp.shp' src = hazard_layer.filename print src try: call( "ogr2ogr -clipsrc %s %s %s" % \ (bbox_string, dest, src), shell=True ) except: print 'could not clip hazard' else: print 'created clipped hazard. Reading layer now.' try: clipped_hazard = read_layer("hazard_tmp.shp") except: print 'something went wrong when reading the clipped hazard' else: print clipped_hazard dest = 'exposure_tmp.shp' src = exposure_layer.filename print src try: call( "ogr2ogr -clipsrc %s %s %s" % \ (bbox_string, dest, src), shell=True ) except: print 'could not clip exposure' else: print 'created clipped exposure. Reading layer now.' 
try: clipped_exposure = read_layer( "exposure_tmp.shp") except: print 'something went wrong when reading the clipped exposure' else: print clipped_exposure #get impact function based on layer keywords fncs = get_admissible_plugins( [hazard_dict, exposure_dict]) impact_fnc = fncs.values()[0] layers = [clipped_hazard, clipped_exposure] # Call calculate_impact impact_file = calculate_impact( layers, impact_function) tmpfile = 'tmp%s.json' % impact_name #5. Serialize the output into json and write out # Convert the impact file into a json file call([ 'ogr2ogr', '-f', 'GeoJSON', tmpfile, impact_file.filename ]) # Open the json file f = open(tmpfile) #FIXME: Something needs to be done about the encoding # Load the file as json json_data = json.loads(f.read(), ) # Write it out as json writeout = json.dumps(json_data, ) #close the file, and delete temporary files f.close() os.remove(tmpfile) os.remove("hazard_tmp.shp") os.remove("exposure_tmp.shp") #os.remove(impact_file.filename) #6. Cache if cache: redis_server.set(impact_name, writeout) #use setex to add a cache expiry #writeout = json.dumps(impact_file.data, encoding='latin-1') else: writeout = 'Sorry, your query returned one or' + \ ' more empty matches' # except: # writeout = 'Something went wrong! Hmmm...' self.set_header('Content-Type', 'application/javascript') self.write(writeout)
def calculate(request, save_output=save_file_to_geonode):
    """Run an InaSAFE impact calculation requested over HTTP POST.

    Args:
        request - Django HttpRequest. POST parameters read:
            impact_function, hazard_server, hazard, exposure_server,
            exposure, bbox and keywords.
        save_output - callable used to persist the computed impact
            layer (defaults to save_file_to_geonode).

    Returns:
        On GET: an HttpResponse telling the caller to use POST.
        On POST failure: a JSON HttpResponse with 'errors' and
        'stacktrace' keys.

    .. note:: NOTE(review): as visible here the successful POST path
        falls through without an explicit return -- presumably the
        success response is built further down in code not shown;
        TODO confirm.
    """
    start = datetime.datetime.now()
    if request.method == 'GET':
        # FIXME: Add a basic form here to be able to generate the POST request.
        return HttpResponse('This should be accessed by robots, not humans.'
                            'In other words using HTTP POST instead of GET.')
    elif request.method == 'POST':
        data = request.POST
        impact_function_name = data['impact_function']
        hazard_server = data['hazard_server']
        hazard_layer = data['hazard']
        exposure_server = data['exposure_server']
        exposure_layer = data['exposure']
        requested_bbox = data['bbox']
        # NOTE(review): 'keywords' is read but never used in the code
        # visible here.
        keywords = data['keywords']

        if request.user.is_anonymous():
            theuser = get_valid_user()
        else:
            theuser = request.user

        # Create entry in database; success=False until the run completes
        # so aborted calculations remain recorded as failures.
        calculation = Calculation(user=theuser,
                                  run_date=start,
                                  hazard_server=hazard_server,
                                  hazard_layer=hazard_layer,
                                  exposure_server=exposure_server,
                                  exposure_layer=exposure_layer,
                                  impact_function=impact_function_name,
                                  success=False)

        # Wrap main computation loop in try except to catch and present
        # messages and stack traces in the application
        try:
            # Get metadata for both layers from their servers
            haz_metadata = get_metadata(hazard_server, hazard_layer)
            exp_metadata = get_metadata(exposure_server, exposure_layer)

            # Determine common resolution in case of raster layers
            raster_resolution = get_common_resolution(haz_metadata,
                                                      exp_metadata)

            # Get reconciled bounding boxes for hazard, exposure and impact
            haz_bbox, exp_bbox, imp_bbox = get_bounding_boxes(
                haz_metadata, exp_metadata, requested_bbox)

            # Record layers to download
            download_layers = [(hazard_server, hazard_layer, haz_bbox),
                               (exposure_server, exposure_layer, exp_bbox)]

            # Add linked layers if any FIXME: STILL TODO!

            # Get selected impact function
            plugins = get_admissible_plugins()
            msg = ('Could not find "%s" in "%s"' % (impact_function_name,
                                                    plugins.keys()))
            assert impact_function_name in plugins, msg
            impact_function = plugins.get(impact_function_name)
            impact_function_source = inspect.getsource(impact_function)

            # Record information calculation object and save it
            calculation.impact_function_source = impact_function_source
            calculation.bbox = bboxlist2string(imp_bbox)
            calculation.save()

            # Start computation
            msg = 'Performing requested calculation'
            #logger.info(msg)

            # Download selected layer objects at the reconciled bboxes
            layers = []
            for server, layer_name, bbox in download_layers:
                msg = ('- Downloading layer %s from %s' % (layer_name,
                                                           server))
                #logger.info(msg)
                L = download(server, layer_name, bbox, raster_resolution)
                layers.append(L)

            # Calculate result using specified impact function
            msg = ('- Calculating impact using %s' % impact_function_name)
            #logger.info(msg)
            impact_file = calculate_impact(layers=layers,
                                           impact_fcn=impact_function)

            # Upload result to internal GeoServer
            msg = ('- Uploading impact layer %s' % impact_file.name)
            # Determine layer title for upload from the output keywords
            output_kw = impact_file.get_keywords()
            title = impact_file.get_name() + " using " + \
                output_kw['hazard_title'] + \
                " and " + output_kw['exposure_title']
            result = save_output(impact_file.filename,
                                 title=title,
                                 user=theuser,
                                 overwrite=False)
        except Exception, e:
            # FIXME: Reimplement error saving for calculation.
            # FIXME (Ole): Why should we reimplement?
            # This is dangerous. Try to raise an exception
            # e.g. in get_metadata_from_layer. Things will silently fail.
            # See issue #170
            #logger.error(e)
            # Persist the failure details on the Calculation record and
            # return them to the client as JSON.
            errors = e.__str__()
            trace = exception_format(e)
            calculation.errors = errors
            calculation.stacktrace = trace
            calculation.save()
            jsondata = json.dumps({'errors': errors, 'stacktrace': trace})
            return HttpResponse(jsondata, mimetype='application/json')
def questions(request):
    """Get a list of all the questions, layers and functions

    Will provide a list of plugin functions and the layers that the
    plugins will work with. Takes geoserver urls as a GET parameter
    can have a comma separated list

    e.g. http://127.0.0.1:8000/riab/api/v1/functions/?geoservers=http:...
    assumes version 1.0.0
    """
    if 'geoservers' in request.GET:
        # FIXME for the moment assume version 1.0.0
        urls = request.GET['geoservers'].split(',')
        geoservers = [{'url': url, 'version': '1.0.0'} for url in urls]
    else:
        geoservers = get_servers(request.user)

    # Merge layer metadata from every geoserver into one mapping.
    layers = {}
    for server in geoservers:
        layers.update(get_metadata(server['url']))

    # Describe every admissible plugin function.
    functions = {}
    for plugin_name, plugin in get_admissible_plugins().items():
        entry = {'doc': plugin.__doc__}
        for attribute in ('author', 'title', 'rating'):
            if hasattr(plugin, attribute):
                entry[attribute] = getattr(plugin, attribute)
        functions[plugin_name] = entry

    output = {'layers': layers, 'functions': functions}

    # Partition the layers into hazards and exposures by their
    # 'category' keyword; layers without one are ignored.
    hazards = []
    exposures = []
    for layer_name, params in layers.items():
        category = params['keywords'].get('category')
        if category == 'hazard':
            hazards.append(layer_name)
        elif category == 'exposure':
            exposures.append(layer_name)

    # Combine each hazard/exposure pair with every plugin admissible for
    # that pair to form the 3-tuples of askable questions.
    questions = []
    for hazard in hazards:
        for exposure in exposures:
            hazard_keywords = layers[hazard]['keywords']
            exposure_keywords = layers[exposure]['keywords']
            # Record layertype next to the other keywords (this mutates
            # the entries in 'layers' in place, as before).
            hazard_keywords['layertype'] = layers[hazard]['layertype']
            exposure_keywords['layertype'] = layers[exposure]['layertype']
            admissible = get_admissible_plugins(
                keywords=[hazard_keywords, exposure_keywords])
            for function in admissible:
                questions.append({'hazard': hazard,
                                  'exposure': exposure,
                                  'function': function})

    output['questions'] = questions
    jsondata = json.dumps(output)
    return HttpResponse(jsondata, mimetype='application/json')
def get(self): # try: exposure_id = self.get_argument('e') hazard_id = self.get_argument('h') impact_name = 'impact-e%s-h%s' % (exposure_id, hazard_id) if exposure_id and hazard_id: # First check if the impact already exists in the cache try: # try to connect to the redis cache redis_server = redis.Redis() cache = True print 'Successfully connected to redis!' except: # This is just a flag that will be used later on print "I couldn't connect to redis" cache = False else: # If the impact exists, get it from the cache and return if redis_server.exists(impact_name): print 'Entry exists in cache!' writeout = redis_server.get(impact_name) self.set_header('Content-Type', 'application/javascript') self.write(writeout) return # Query the db and calculate if it doesn't try: #try connecting to the pg database conn = psycopg2.connect( "dbname='dev' user='******' password='******'" ) print 'Successfully connected to postgres!' except: writeout = 'Could not connect to the database!' else: # create a cursor cursor = conn.cursor( cursor_factory = psycopg2.extras.DictCursor ) try: #1. 
Query the db for the layers query = 'SELECT shapefile FROM layers'+\ ' WHERE id = %s' % exposure_id cursor.execute(query) exposure = cursor.fetchone() query = 'SELECT shapefile FROM layers'+\ ' WHERE id = %s' % hazard_id cursor.execute(query) hazard = cursor.fetchone() except: writeout = 'There was something wrong with your query' conn.rollback() else: if exposure and hazard: # Pass the shapefile (paths) to read_layer try: exposure_layer = read_layer(exposure['shapefile']) hazard_layer = read_layer(hazard['shapefile']) except: writeout = 'Something went wrong when reading the layers' # Keywords exposure_dict = exposure_layer.get_keywords() hazard_dict = hazard_layer.get_keywords() if exposure_layer.is_vector: exposure_dict['layertype'] = 'vector' else: exposure_dict['layertype'] = 'raster' if hazard_layer.is_vector: hazard_dict['layertype'] = 'vector' else: exposure_dict['layertype'] = 'raster' #get optimal bounding box common_bbox = bbox_intersection( exposure_layer.get_bounding_box(), hazard_layer.get_bounding_box() ) print exposure_layer.get_bounding_box() print hazard_layer.get_bounding_box() bbox_string = '' try: for val in common_bbox: bbox_string += str(val) + ' ' except: writeout = 'The layers have no intersection!' else: #gdal clip dest = 'hazard_tmp.shp' src = hazard_layer.filename print src try: call( "ogr2ogr -clipsrc %s %s %s" % \ (bbox_string, dest, src), shell=True ) except: print 'could not clip hazard' else: print 'created clipped hazard. Reading layer now.' try: clipped_hazard = read_layer("hazard_tmp.shp") except: print 'something went wrong when reading the clipped hazard' else: print clipped_hazard dest = 'exposure_tmp.shp' src = exposure_layer.filename print src try: call( "ogr2ogr -clipsrc %s %s %s" % \ (bbox_string, dest, src), shell=True ) except: print 'could not clip exposure' else: print 'created clipped exposure. Reading layer now.' 
try: clipped_exposure = read_layer("exposure_tmp.shp") except: print 'something went wrong when reading the clipped exposure' else: print clipped_exposure #get impact function based on layer keywords fncs = get_admissible_plugins([hazard_dict, exposure_dict]) impact_fnc = fncs.values()[0] layers = [clipped_hazard, clipped_exposure] # Call calculate_impact impact_file = calculate_impact( layers, impact_function ) tmpfile = 'tmp%s.json' % impact_name #5. Serialize the output into json and write out # Convert the impact file into a json file call(['ogr2ogr', '-f', 'GeoJSON', tmpfile, impact_file.filename]) # Open the json file f = open(tmpfile) #FIXME: Something needs to be done about the encoding # Load the file as json json_data = json.loads( f.read(), ) # Write it out as json writeout = json.dumps( json_data, ) #close the file, and delete temporary files f.close() os.remove(tmpfile) os.remove("hazard_tmp.shp") os.remove("exposure_tmp.shp") #os.remove(impact_file.filename) #6. Cache if cache: redis_server.set(impact_name, writeout) #use setex to add a cache expiry #writeout = json.dumps(impact_file.data, encoding='latin-1') else: writeout = 'Sorry, your query returned one or' + \ ' more empty matches' # except: # writeout = 'Something went wrong! Hmmm...' self.set_header('Content-Type', 'application/javascript') self.write(writeout)