def get_geojson_cyclone(request, iso_alpha3=None):
    """Build an admin-2 GeoJSON FeatureCollection for *iso_alpha3* enriched
    with cyclone population-at-risk rows, land-degradation context, and VAM
    food-security data.

    :param request: request object, passed through to the VAM lookup.
    :param iso_alpha3: ISO 3166-1 alpha-3 country code.
    :return: GeoJSON FeatureCollection (dict).
    """
    collection = None
    with GeoDashDatabaseConnection() as geodash_conn:
        # Admin 2 Districts
        collection = data_local_country_admin().get(cursor=geodash_conn.cursor,
                                                    iso_alpha3=iso_alpha3,
                                                    level=2)
        # Vam Data
        vam_by_admin1 = get_vam_by_admin1(request, iso_alpha3=iso_alpha3)
        # Population at Risk Data
        rows_popatrisk = geodash_conn.exec_query_multiple(
            get_template("sparc2/sql/_cyclone.sql").render({
                'admin2_popatrisk': 'cyclone.admin2_popatrisk',
                'iso_alpha3': iso_alpha3
            }))
        popatrisk_by_admin2 = {}
        for row in rows_popatrisk:
            # Rows may arrive as JSON text or as already-decoded dicts.
            newRow = row[0] if isinstance(row[0], dict) else json.loads(row[0])
            admin2_code = newRow.pop(u"admin2_code", None)
            newRow.pop(u"iso3", None)
            # BUGFIX: key by str so the lookup below (which uses
            # str(feature properties)) matches even when the decoder yields
            # integer admin2 codes; mirrors get_geojson_flood.
            popatrisk_by_admin2.setdefault(str(admin2_code), []).append(newRow)
        context_by_admin2 = get_context_by_admin2(geodash_conn=geodash_conn,
                                                  iso_alpha3=iso_alpha3)
        for feature in collection["features"]:
            admin1_code = str(feature["properties"]["admin1_code"])
            admin2_code = str(feature["properties"]["admin2_code"])
            if admin2_code in context_by_admin2:
                context = context_by_admin2[admin2_code]
                feature["properties"]["ldi"] = context["ldi"]
                feature["properties"]["delta_negative"] = context["delta_negative"]
                feature["properties"]["erosion_propensity"] = context["erosion_propensity"]
            # Zero defaults; overwritten from VAM data when available below.
            feature["properties"].update({
                "FCS": 0,
                "FCS_border": 0,
                "FCS_acceptable": 0,
                "CSI_no": 0,
                "CSI_low": 0,
                "CSI_med": 0,
                "CSI_high": 0
            })
            feature["properties"]["addinfo"] = popatrisk_by_admin2.get(admin2_code, [])
            if admin1_code in vam_by_admin1:
                feature["properties"].update(vam_by_admin1[admin1_code])
    return collection
def get_geojson_vam(request, iso_alpha3=None):
    """Return an admin-1 GeoJSON FeatureCollection for *iso_alpha3* with
    remote VAM food-consumption (FCS) and coping-strategy (CSI) figures
    attached to each feature under ``properties.vam``.

    :param request: request object (unused beyond the view signature).
    :param iso_alpha3: ISO 3166-1 alpha-3 country code.
    :return: GeoJSON FeatureCollection (dict).
    """
    collection = None
    with GeoDashDatabaseConnection() as geodash_conn:
        collection = data_local_country_admin().get(cursor=geodash_conn.cursor,
                                                    iso_alpha3=iso_alpha3,
                                                    level=1)
        for feature in collection["features"]:
            props = feature["properties"]
            # One FCS and one CSI request per admin-1 unit.
            fcs_rows = requests.get(url=URL_VAM["FCS"].format(
                admin0=props["admin0_code"],
                admin1=props["admin1_code"])).json()
            csi_rows = requests.get(url=URL_VAM["CSI"].format(
                admin0=props["admin0_code"],
                admin1=props["admin1_code"])).json()
            vam = {}
            if fcs_rows:
                first = fcs_rows[0]
                vam["fcs"] = {
                    "year": first["FCS_year"],
                    "month": first["FCS_month"],
                    "source": first["FCS_dataSource"],
                    "poor": first["FCS_poor"],
                    "borderline": first["FCS_borderline"],
                    "acceptable": first["FCS_acceptable"]
                }
            if csi_rows:
                first = csi_rows[0]
                vam["csi"] = {
                    "year": first["CSI_rYear"],
                    "month": first["CSI_rMonth"],
                    "source": first["CSI_rDataSource"],
                    "no": first["CSI_rNoCoping"],
                    "low": first["CSI_rLowCoping"],
                    "medium": first["CSI_rMediumCoping"],
                    "high": first["CSI_rHighCoping"]
                }
            props["vam"] = vam
    return collection
def get_geojson_context(request, iso_alpha3=None):
    """Return an admin-2 GeoJSON FeatureCollection for *iso_alpha3* with each
    feature's properties merged with its matching context rows from
    ``context.admin2_context``.

    :param request: request object (unused beyond the view signature).
    :param iso_alpha3: ISO 3166-1 alpha-3 country code.
    :return: GeoJSON FeatureCollection (dict).
    """
    collection = None
    with GeoDashDatabaseConnection() as geodash_conn:
        collection = data_local_country_admin().get(cursor=geodash_conn.cursor,
                                                    iso_alpha3=iso_alpha3,
                                                    level=2)
        rows_context = geodash_conn.exec_query_multiple(
            get_template("sparc2/sql/_context.sql").render({
                'admin2_context': 'context.admin2_context',
                'iso_alpha3': iso_alpha3
            }))
        if "features" in collection:
            # PERF: index context rows by admin2 code once instead of the
            # original O(features x rows) nested scan. A list per code keeps
            # the original "apply every matching row in order" semantics.
            contexts_by_code = {}
            for row_context in rows_context:
                json_context = (row_context[0]
                                if isinstance(row_context[0], dict)
                                else json.loads(row_context[0]))
                code = int(json_context["admin2_code"])
                contexts_by_code.setdefault(code, []).append(json_context)
            for feature in collection["features"]:
                code = feature["properties"]["admin2_code"]
                for json_context in contexts_by_code.get(code, []):
                    feature["properties"].update(json_context)
    return collection
def get_summary_drought(table_popatrisk=None, iso_alpha3=None):
    """Summarize drought population-at-risk for a country.

    Produces country-wide natural breakpoints plus monthly totals per
    drought probability class, both country-wide and per admin-2 unit.

    :param table_popatrisk: fully-qualified population-at-risk table name.
    :param iso_alpha3: ISO 3166-1 alpha-3 country code.
    :return: summary dict with ``all``, ``prob_class``, ``admin2``, ``header``.
    :raises Exception: if either required argument is missing.
    """
    if (not table_popatrisk) or (not iso_alpha3):
        # BUGFIX: message said "iso3" though the parameter is iso_alpha3;
        # now consistent with get_summary_cyclone.
        raise Exception(
            "Missing table_popatrisk or iso_alpha3 for get_summary_drought.")
    summary = None
    prob_classes = [
        {"min": 0.01, "max": 0.05, "label": "1-5%"},
        {"min": 0.06, "max": 0.10, "label": "5-10%"},
        {"min": 0.11, "max": 0.19, "label": "10-20%"},
        {"min": 0.20, "max": 1.00, "label": "20-100%"}
    ]
    values_all = []
    values_float = []
    values_by_prob_class = {}
    values_by_prob_class_by_month = {}
    values_by_admin2_by_prob_class_by_month = {}
    with GeoDashDatabaseConnection() as geodash_conn:
        # Country-wide values per probability class (feeds breakpoints).
        for prob_class in prob_classes:
            values = geodash_conn.exec_query_single_aslist(
                get_template("sparc2/sql/_drought_data_all_at_admin2_for_probclass.sql").render({
                    'admin2_popatrisk': table_popatrisk,
                    'prob_min': prob_class["min"],
                    'prob_max': prob_class["max"],
                    'iso_alpha3': iso_alpha3}))
            values_by_prob_class[prob_class['label']] = values
        for prob_class in prob_classes:
            values_all = values_all + values_by_prob_class[prob_class['label']]
        values_float = [float(x) for x in values_all]
        num_breakpoints = 5
        natural = calc_breaks_natural(values_float, num_breakpoints)
        natural_adjusted = natural
        for prob_class in prob_classes:
            # Country-wide monthly totals for this probability class.
            rows = geodash_conn.exec_query_multiple(
                get_template("sparc2/sql/_drought_data_all_at_admin2_by_month_for_probclass.sql").render({
                    'admin2_popatrisk': table_popatrisk,
                    'prob_min': prob_class["min"],
                    'prob_max': prob_class["max"],
                    'iso_alpha3': iso_alpha3}))
            by_month = values_by_prob_class_by_month.setdefault(
                prob_class["label"], {})
            for month, value in rows:
                by_month[month] = value
            # Per-admin-2 monthly totals for this probability class.
            rows = geodash_conn.exec_query_multiple(
                get_template("sparc2/sql/_drought_data_by_admin2_by_month_for_probclass.sql").render({
                    'admin2_popatrisk': table_popatrisk,
                    'prob_min': prob_class["min"],
                    'prob_max': prob_class["max"],
                    'iso_alpha3': iso_alpha3}))
            for admin2_code, month, value in rows:
                values_by_admin2_by_prob_class_by_month \
                    .setdefault(admin2_code, {}) \
                    .setdefault(prob_class["label"], {})[month] = value
    max_values = 0
    if values_float:
        max_values = int(max(values_float))
    summary = {
        'all': {
            "max": {
                'at_country_month': None,
                'at_admin2_month': max_values
            },
            'breakpoints': {
                'natural': natural,
                'natural_adjusted': [0] + natural_adjusted
            }
        },
        "prob_class": {},
        "admin2": {}
    }
    for prob_class in prob_classes:
        by_month = values_by_prob_class_by_month[prob_class["label"]]
        # Months with no data default to 0.
        summary["prob_class"][prob_class["label"]] = {
            "by_month": [by_month.get(x, 0) for x in MONTHS_SHORT3]
        }
    for admin2_code in values_by_admin2_by_prob_class_by_month:
        for label in values_by_admin2_by_prob_class_by_month[admin2_code]:
            values_by_month = [
                values_by_admin2_by_prob_class_by_month[admin2_code][label].get(x, 0)
                for x in MONTHS_SHORT3]
            summary["admin2"] = insertIntoObject(
                summary["admin2"],
                [admin2_code, "prob_class", label, 'by_month'],
                values_by_month)
    summary['header'] = {
        'all_breakpoints_natural': len(summary["all"]["breakpoints"]["natural"]),
        'all_breakpoints_natural_adjusted': len(summary["all"]["breakpoints"]["natural_adjusted"]),
        'admin2': len(summary["admin2"].keys()),
        'prob_classes': prob_classes
    }
    return summary
def get_summary_cyclone(table_popatrisk=None, iso_alpha3=None):
    """Summarize cyclone population-at-risk for a country.

    Produces country-wide natural breakpoints plus monthly totals per
    cyclone probability class, both country-wide and per admin-2 unit.

    :param table_popatrisk: fully-qualified population-at-risk table name.
    :param iso_alpha3: ISO 3166-1 alpha-3 country code.
    :return: summary dict with ``all``, ``prob_class``, ``admin2``, ``header``.
    :raises Exception: if either required argument is missing.
    """
    # NOTE: removed dead locals (now / current_month) that were never read.
    if (not table_popatrisk) or (not iso_alpha3):
        raise Exception(
            "Missing table_popatrisk or iso_alpha3 for get_summary_cyclone.")
    summary = None
    with GeoDashDatabaseConnection() as geodash_conn:
        values = geodash_conn.exec_query_single_aslist(
            get_template("sparc2/sql/_cyclone_data_all_at_admin2.sql").render({
                'admin2_popatrisk': table_popatrisk,
                'iso_alpha3': iso_alpha3
            }))
        values = [float(x) for x in values]
        num_breakpoints = 5
        natural = calc_breaks_natural(values, num_breakpoints)
        natural_adjusted = natural
        summary = {
            'all': {
                "max": {
                    'at_country_month': None,
                    # 0 when the query returned no rows.
                    'at_admin2_month': max(values) if values else 0
                },
                'breakpoints': {
                    'natural': natural,
                    'natural_adjusted': [0] + natural_adjusted
                }
            },
            "prob_class": {},
            "admin2": {}
        }
        prob_classes = [
            "0.01-0.1", "0.1-0.2", "0.2-0.3", "0.3-0.4", "0.4-0.5",
            "0.5-0.6", "0.6-0.7", "0.7-0.8", "0.8-0.9", "0.9-1.0"
        ]
        for prob_class in prob_classes:
            # Country-wide monthly totals for this probability class.
            values = geodash_conn.exec_query_single_aslist(
                get_template("sparc2/sql/_cyclone_data_by_prob_class_month.sql").render({
                    'admin2_popatrisk': table_popatrisk,
                    'iso_alpha3': iso_alpha3,
                    'prob_class': prob_class
                }))
            summary["prob_class"][prob_class] = {
                'by_month': [float(x) for x in values]
            }
            # Per-admin-2 monthly totals, one comma-joined string per row.
            rows = geodash_conn.exec_query_multiple(
                get_template("sparc2/sql/_cyclone_data_by_group_prob_class_month.sql").render({
                    'admin2_popatrisk': table_popatrisk,
                    'iso_alpha3': iso_alpha3,
                    'prob_class': prob_class,
                    'group': 'admin2_code'
                }))
            for admin2_code, month_values in rows:
                # Simplified (was: initialize dict, then re-check the keys
                # that were just created).
                admin2 = summary["admin2"].setdefault(
                    admin2_code, {"prob_class": {}})
                admin2["prob_class"].setdefault(prob_class, {})['by_month'] = [
                    float(x) for x in month_values.split(",")]
    summary['header'] = {
        'all_breakpoints_natural': len(summary["all"]["breakpoints"]["natural"]),
        'all_breakpoints_natural_adjusted': len(summary["all"]["breakpoints"]["natural_adjusted"]),
        'admin2': len(summary["admin2"].keys()),
        'prob_classes': prob_classes
    }
    return summary
def get_geojson_landslide(request, iso_alpha3=None):
    """Build an admin-2 GeoJSON FeatureCollection for *iso_alpha3* with
    landslide population-at-risk (per probability class and month),
    land-degradation context, and VAM food-security data on each feature.

    :param request: request object, passed through to the VAM lookup.
    :param iso_alpha3: ISO 3166-1 alpha-3 country code.
    :return: GeoJSON FeatureCollection (dict).
    """
    collection = None
    prob_classes = [
        {"code": 1, "label": "low"},
        {"code": 2, "label": "medium"},
        {"code": 3, "label": "high"},
        {"code": 4, "label": "very_high"}
    ]
    with GeoDashDatabaseConnection() as geodash_conn:
        # Admin 2 Districts
        collection = data_local_country_admin().get(cursor=geodash_conn.cursor,
                                                    iso_alpha3=iso_alpha3,
                                                    level=2)
        # Vam Data
        vam_by_admin1 = get_vam_by_admin1(request, iso_alpha3=iso_alpha3)
        # Context Data
        context_by_admin2 = get_context_by_admin2(geodash_conn=geodash_conn,
                                                  iso_alpha3=iso_alpha3)
        # Population at Risk Data, nested admin2 -> class label -> month.
        popatrisk = {}
        for prob_class in prob_classes:
            rows = geodash_conn.exec_query_multiple(
                get_template("sparc2/sql/_landslide_data_by_admin2_for_probclass.sql").render({
                    'admin2_popatrisk': 'landslide.admin2_popatrisk',
                    'prob_min': str(prob_class["code"]),
                    'prob_max': str(prob_class["code"]),
                    'iso_alpha3': iso_alpha3
                }))
            for admin2_code, month, value in rows:
                popatrisk.setdefault(admin2_code, {}) \
                         .setdefault(prob_class["label"], {})[month] = value
        for feature in collection["features"]:
            props = feature["properties"]
            admin1_code = str(props["admin1_code"])
            admin2_code = str(props["admin2_code"])
            context = context_by_admin2.get(admin2_code)
            if context is not None:
                props["ldi"] = context["ldi"]
                props["delta_negative"] = context["delta_negative"]
                props["erosion_propensity"] = context["erosion_propensity"]
            # Zero defaults; overwritten from VAM data when available.
            props.update({
                "FCS": 0,
                "FCS_border": 0,
                "FCS_acceptable": 0,
                "CSI_no": 0,
                "CSI_low": 0,
                "CSI_med": 0,
                "CSI_high": 0
            })
            if admin1_code in vam_by_admin1:
                props.update(vam_by_admin1[admin1_code])
            props["addinfo"] = []
            by_label = popatrisk.get(admin2_code, {})
            for prob_class in prob_classes:
                monthly = by_label.get(prob_class["label"])
                if monthly is None:
                    # No rows for this class/district: omit the entry,
                    # matching the original includeX behavior.
                    continue
                entry = {
                    "prob_class": prob_class["label"],
                    "prob_class_min": str(prob_class["code"]),
                    "prob_class_max": str(prob_class["code"])
                }
                for month in MONTHS_SHORT3:
                    entry[month] = monthly.get(month, 0)
                props["addinfo"].append(entry)
    return collection
def get_geojson_flood(request, iso_alpha3=None):
    """Build an admin-2 GeoJSON FeatureCollection for *iso_alpha3* with flood
    population-at-risk by return period (``RP25`` ... ``RP1000``, one monthly
    dict each), land-degradation context, and VAM food-security data.

    :param request: request object, passed through to the VAM lookup.
    :param iso_alpha3: ISO 3166-1 alpha-3 country code.
    :return: GeoJSON FeatureCollection (dict).
    """
    collection = None
    with GeoDashDatabaseConnection() as geodash_conn:
        # Admin 2 Districts
        collection = data_local_country_admin().get(cursor=geodash_conn.cursor,
                                                    iso_alpha3=iso_alpha3,
                                                    level=2)
        # Vam Data
        vam_by_admin1 = get_vam_by_admin1(request, iso_alpha3=iso_alpha3)
        # PERF: loop-invariant — was fetched once per return period.
        context_by_admin2 = get_context_by_admin2(geodash_conn=geodash_conn,
                                                  iso_alpha3=iso_alpha3)
        # Population at Risk Data
        returnPeriods = [25, 50, 100, 200, 500, 1000]
        for rp in returnPeriods:
            rows = geodash_conn.exec_query_multiple(
                get_template("sparc2/sql/_flood_data_by_admin2_rp_month_asjson.sql").render({
                    'admin2_popatrisk': 'flood.admin2_popatrisk',
                    'iso_alpha3': iso_alpha3,
                    'rp': rp
                }))
            values_by_admin2 = {}
            for admin2_code, data in rows:
                # ROBUSTNESS: default avoids KeyError when the JSON row
                # lacks admin2_code (matches the cyclone handler).
                data.pop(u"admin2_code", None)
                values_by_admin2[str(admin2_code)] = data
            for feature in collection["features"]:
                admin2_code = str(feature["properties"]["admin2_code"])
                values = values_by_admin2.get(admin2_code, None)
                if values:
                    feature["properties"]["RP" + str(rp)] = values
                else:
                    # Districts with no data get an all-zero monthly dict.
                    feature["properties"]["RP" + str(rp)] = {
                        x: 0 for x in MONTHS_SHORT3
                    }
        # Context + VAM merged once per feature (previously repeated,
        # idempotently, for every return period).
        for feature in collection["features"]:
            admin1_code = str(feature["properties"]["admin1_code"])
            admin2_code = str(feature["properties"]["admin2_code"])
            if admin2_code in context_by_admin2:
                context = context_by_admin2[admin2_code]
                for key in ("ldi", "delta_mean", "delta_positive",
                            "delta_negative", "delta_crop", "delta_forest",
                            "erosion_propensity"):
                    feature["properties"][key] = context[key]
            if admin1_code in vam_by_admin1:
                feature["properties"].update(vam_by_admin1[admin1_code])
    return collection