def validate_aggregate():
    # validate aggregate
    agg_by = request.args.get('aggregate_by')
    agg_values = request.args.get('aggregate_values')

    agg_list = ['day', 'week', 'quarter', 'month', 'year', 'adm1', 'adm2']

    if agg_values:
        if agg_values.lower() not in ['true', 'false']:
            raise Error("aggregate_values parameter must be either true or false")

        # convert the validated string to a boolean rather than eval()-ing user input
        agg_values = agg_values.lower() == 'true'

    if agg_by and not agg_values:
        raise Error("aggregate_values parameter must be true in order to aggregate data")

    if agg_values and not agg_by:
        raise Error("if aggregate_values is TRUE, aggregate_by parameter must be specified "
                    "as one of: {}".format(", ".join(agg_list)))

    if agg_values and agg_by and agg_by.lower() not in agg_list:
        raise Error("aggregate_by must be specified as one of: {}".format(", ".join(agg_list)))
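
# --- Illustrative sketch, not part of the original module ---
# Examples of query strings the checks above accept or reject (endpoint path hypothetical):
#   ?aggregate_values=true&aggregate_by=week      -> OK
#   ?aggregate_values=false                       -> OK (no aggregation)
#   ?aggregate_by=week                            -> Error: aggregate_values must be true
#   ?aggregate_values=true                        -> Error: aggregate_by must be specified
#   ?aggregate_values=true&aggregate_by=decade    -> Error: not in the allowed list
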
def calc_stats(geojson, request, geostore_id):
    """Given an input geojson and (optionally) some params (period, agg_by, etc),
    calculate the # of alerts in an AOI"""
    geom = shape(geojson['features'][0]['geometry'])
    geom_area_ha = tile_geometry.calc_area(geom, proj='aea')

    # check if it's too big to send to raster analysis
    # current cutoff is 10,000,000 ha, or about the size of Kentucky
    if geom_area_ha > 10000000:
        logging.info("Geometry is larger than 10 million ha. Tiling request")

        # simplify geometry if it's large
        if sys.getsizeof(json.dumps(geojson)) > 100000:
            if geom.geom_type == 'Polygon':
                geom = geom.simplify(0.05).buffer(0).exterior
            else:
                geom = geom.simplify(0.05).buffer(0)

        # seems unnecessary but important for cases where buffer or exterior
        # give us a non-polygon shape
        geom = Polygon(geom) if geom.geom_type == 'Polygon' else MultiPolygon(geom)

        # find all tiles that intersect the AOI, calculating a proportion of overlap for each
        tile_dict = tile_geometry.build_tile_dict(geom)

        # insert intersect list into mbtiles database as tiles_aoi
        conn, cursor = sqlite_util.connect()
        sqlite_util.insert_intersect_table(cursor, tile_dict)

        # query the database for summarized results for our AOI
        rows = sqlite_util.select_within_tiles(cursor, request)

        # aggregate as necessary into week/month/year
        alerts_dict = aggregate_response.format_alerts_geom(rows, request)

    else:
        logging.info('Geometry is < 10 million ha. Passing to raster lambda function')
        url = 'https://0kepi1kf41.execute-api.us-east-1.amazonaws.com/dev/glad-alerts'
        kwargs = {'json': {'geojson': geojson},
                  'headers': {'Content-Type': 'application/json'},
                  'params': request.args.to_dict()}

        try:
            r = requests.post(url, timeout=28, **kwargs)
        except requests.exceptions.Timeout:
            raise Error('Request timed out - try simplifying the polygon or '
                        'splitting it into multiple requests.')

        resp = r.json()

        if 'errors' in resp:
            raise Error(resp['errors'][0]['detail'])

        alerts_dict = resp['data']['attributes']['value']

    return serialize_response(request, alerts_dict, geom_area_ha, geostore_id)
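
# --- Illustrative sketch, not part of the original module ---
# A standalone version of the size-based simplification step above, using only shapely.
# The thresholds mirror the 100,000-byte / 0.05-degree values used in calc_stats; the
# helper name and signature are hypothetical.
import json
import sys

from shapely.geometry import shape

def simplify_if_large(geojson, size_threshold=100000, tolerance=0.05):
    geom = shape(geojson['features'][0]['geometry'])

    if sys.getsizeof(json.dumps(geojson)) > size_threshold:
        # buffer(0) cleans up any self-intersections introduced by simplify()
        geom = geom.simplify(tolerance).buffer(0)

    return geom
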
def wrapper(*args, **kwargs):
    if request.method == 'GET':
        geostore_id = request.args.get('geostore')
        use_type = request.view_args.get('use_type')
        wdpa_id = request.view_args.get('wdpa_id')

        # if it's a GET request, we know it has to have
        # either geostore ID, use ID, or wdpa ID
        # we'll use the geostore to look up each of these geometries
        # so build the geostore URI accordingly
        if geostore_id:
            geostore_uri = '/v2/geostore/{}'.format(geostore_id)

        # convert use_type & use_id into a geostore ID
        # important for serialization - that's how we'll build a download url
        elif use_type:
            use_id = request.view_args.get('use_id')
            geostore_uri = '/v2/geostore/use/{}/{}'.format(use_type, use_id)

            # no longer need use_id or use_type - have already converted
            # these to a geostore_uri, which will then give us geojson
            # and geostore_id
            del kwargs['use_id'], kwargs['use_type']

        elif wdpa_id:
            geostore_uri = '/v2/geostore/wdpa/{}'.format(wdpa_id)
            del kwargs['wdpa_id']

        else:
            raise Error('Geostore or geojson must be set')

        # grab the geojson from the geostore
        geostore_query = util.query_microservice(geostore_uri)

        kwargs["geostore_id"] = geostore_query['data']['id']
        geojson = geostore_query['data']['attributes']['geojson']

    # if it's a POST, we should find the geojson in the `geojson` property of the body
    elif request.method == 'POST':
        geojson = request.get_json().get('geojson', None) if request.get_json() else None

        if not geojson:
            raise Error('Geostore or geojson must be set')

    # add geojson variable to kwargs so it's accessible in our routes
    kwargs["geojson"] = geojson

    return func(*args, **kwargs)
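
# --- Illustrative sketch, not part of the original module ---
# The wrapper above is assumed to be the inner function of a decorator that resolves a
# geostore, use, or wdpa identifier to geojson before a route runs. The enclosing
# decorator and the commented-out route below are hypothetical; they only show how the
# injected kwargs ('geojson', 'geostore_id') reach the view function.
from functools import wraps

def resolve_geojson(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        # ... body as shown above: build the geostore URI, query the geostore
        # microservice, then set kwargs['geostore_id'] and kwargs['geojson']
        return func(*args, **kwargs)
    return wrapper

# @app.route('/glad-alerts/use/<use_type>/<use_id>', methods=['GET'])
# @resolve_geojson
# def glad_alerts_by_use(geojson=None, geostore_id=None):
#     return calc_stats(geojson, request, geostore_id)
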
def format_alerts_admin(request, glad_alerts):
    # take the glad alerts format and strip out some attributes
    agg_by = request.args.get('aggregate_by', None)

    for elem in glad_alerts['data']['attributes']['value']:
        if 'alerts' not in elem:
            raise Error('Inner API call returned no alerts.', 500)

    if len(glad_alerts['data']['attributes']['value']) == 1:
        return glad_alerts['data']['attributes']['value'][0]['alerts']

    else:
        formatted_alerts = []

        for d in glad_alerts['data']['attributes']['value']:
            alerts_dict = {}
            alerts_dict['count'] = d['alerts']

            if agg_by:
                alerts_dict[agg_by] = d[agg_by]

                if agg_by in ['day', 'week', 'quarter', 'month']:
                    alerts_dict['year'] = d['year']

            formatted_alerts.append(alerts_dict)

        return formatted_alerts
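
# --- Illustrative sketch, not part of the original module ---
# Example of the reshaping performed above with aggregate_by=week; the payload values are
# hypothetical but use the keys the function reads ('alerts', the aggregate_by field, 'year'):
#   inner value: [{'alerts': 120, 'week': 1, 'year': 2019}, {'alerts': 85, 'week': 2, 'year': 2019}]
#   returned   : [{'count': 120, 'week': 1, 'year': 2019}, {'count': 85, 'week': 2, 'year': 2019}]
# When the inner call returns a single summary element (no aggregation), only the integer
# alert count is returned.
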
def query_microservice(uri):
    config_alerts = {'uri': uri, 'method': 'GET'}
    response = request_to_microservice(config_alerts)

    if response.get('errors'):
        raise Error(**response['errors'][0])

    else:
        return response
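
# --- Illustrative sketch, not part of the original module ---
# Example call, mirroring the usage in the geostore wrapper above; the ID is a placeholder.
geostore = query_microservice('/v2/geostore/<geostore_id>')
geojson = geostore['data']['attributes']['geojson']
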
def connect(dbname=None):
    # useful for testing with our demo db
    sqlite_db = dbname if dbname else os.path.join(data_dir, 'stats.db')

    if not os.path.exists(sqlite_db):
        raise Error('{} does not exist. Dockerfile has download code'.format(sqlite_db))

    conn = sqlite3.connect(sqlite_db)
    cursor = conn.cursor()

    return conn, cursor
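
# --- Illustrative sketch, not part of the original module ---
# Typical usage; the test database path is hypothetical. Callers own the connection
# lifecycle, so close it when finished with the cursor.
conn, cursor = connect()                              # production stats.db under data_dir
# conn, cursor = connect('tests/fixtures/stats.db')   # or point at a local test db
conn.close()
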
def wrapper(*args, **kwargs):
    # grab geojson from our function parameters
    geojson = kwargs['geojson']

    # check if it's a featurecollection or a feature
    gj_type = geojson.get('type')

    if not gj_type:
        raise Error('Invalid geojson - must have type property')

    # convert to a featurecollection - that's our preferred format
    if gj_type.lower() == 'feature':
        geojson = {"type": "FeatureCollection", "features": [geojson]}

    # if it's a featurecollection, just make sure that it has features
    elif gj_type.lower() == 'featurecollection':
        if not geojson.get('features'):
            raise Error('feature collection must have features object')

    else:
        raise Error('input geojson must be of type feature collection or feature')

    if len(geojson['features']) > 1:
        raise Error('input geojson must have only one feature')

    try:
        geom = geojson['features'][0]['geometry']
        geom_type = geom['type']
        geom_coords = geom['coordinates']
    except (KeyError, IndexError, TypeError):
        raise Error('Invalid geojson - geometry does not have proper type or coordinates objects')

    if geom_type.lower() not in ['polygon', 'multipolygon']:
        raise Error('input geojson must be of geometry type polygon or multipolygon')

    # if all else passes, try converting it to a shapely shape
    try:
        shape(geom)
    except Exception:
        raise Error('Error converting input geometry into shapely object; check input geojson')

    # return updated geojson to our function
    kwargs['geojson'] = geojson

    return func(*args, **kwargs)
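
# --- Illustrative sketch, not part of the original module ---
# A minimal geojson Feature that passes the checks above; the wrapper promotes it to a
# one-feature FeatureCollection before handing it back to the route. Coordinates are
# placeholder values.
example_feature = {
    "type": "Feature",
    "properties": {},
    "geometry": {
        "type": "Polygon",
        "coordinates": [[[0.0, 0.0], [0.0, 1.0], [1.0, 1.0], [1.0, 0.0], [0.0, 0.0]]]
    }
}
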
def validate_period():
    # validate period
    today = datetime.datetime.now()
    period = request.args.get('period', None)
    min_year = 2015

    if period:
        if len(period.split(',')) < 2:
            raise Error("Period needs 2 arguments")

        else:
            if '"' in period or "'" in period:
                raise Error("Incorrect format, should be YYYY-MM-DD,YYYY-MM-DD (no quotes)")

            period_from = period.split(',')[0]
            period_to = period.split(',')[1]

            try:
                period_from = datetime.datetime.strptime(period_from, '%Y-%m-%d')
                period_to = datetime.datetime.strptime(period_to, '%Y-%m-%d')

            except ValueError:
                raise Error("Incorrect format, should be YYYY-MM-DD,YYYY-MM-DD")

            if period_from.year < min_year:
                raise Error("Start date can't be earlier than {}-01-01".format(min_year))

            if period_to.year > today.year:
                raise Error("End year can't be later than {}".format(today.year))

            if period_from > period_to:
                raise Error('Start date must be less than end date')
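
# --- Illustrative sketch, not part of the original module ---
# A valid period parameter is two unquoted YYYY-MM-DD dates separated by a comma, with the
# start date no earlier than 2015-01-01 and the end year no later than the current year,
# e.g. (endpoint path hypothetical):
#   GET /glad-alerts?period=2018-01-01,2018-12-31&aggregate_values=true&aggregate_by=week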