# Example 1
def get_summary_drought(table_popatrisk=None, iso_alpha3=None):
    """Build the drought population-at-risk summary for one country.

    For each drought probability class, queries the admin2-level totals
    (for natural breakpoints), the country totals by month, and the
    per-admin2 totals by month, then assembles everything into a nested
    summary dict.

    :param table_popatrisk: name of the admin2 population-at-risk table
    :param iso_alpha3: ISO 3166-1 alpha-3 country code
    :raises Exception: if either argument is missing/falsy
    :return: dict with 'all', 'prob_class', 'admin2' and 'header' sections
    """
    if (not table_popatrisk) or (not iso_alpha3):
        raise Exception("Missing table_popatrisk or iso3 for get_summary_drought.")

    prob_classes = [
        {"min": 0.01, "max": 0.05, "label": "1-5%"},
        {"min": 0.06, "max": 0.10, "label": "5-10%"},
        {"min": 0.11, "max": 0.19, "label": "10-20%"},
        {"min": 0.20, "max": 1.00, "label": "20-100%"}
    ]

    values_all = []
    values_by_prob_class = {}
    values_by_prob_class_by_month = {}
    values_by_admin2_by_prob_class_by_month = {}
    with GeoDashDatabaseConnection() as geodash_conn:
        for prob_class in prob_classes:
            values = geodash_conn.exec_query_single_aslist(
                get_template("sparc2/sql/_drought_data_all_at_admin2_for_probclass.sql").render({
                    'admin2_popatrisk': table_popatrisk,
                    'prob_min': prob_class["min"],
                    'prob_max': prob_class["max"],
                    'iso_alpha3': iso_alpha3}))
            values_by_prob_class[prob_class['label']] = values
            # Accumulate in place instead of rebuilding the list per class.
            values_all.extend(values)

        values_float = [float(x) for x in values_all]
        num_breakpoints = 5
        natural = calc_breaks_natural(values_float, num_breakpoints)
        natural_adjusted = natural

        for prob_class in prob_classes:
            # Country totals by month for this probability class.
            rows = geodash_conn.exec_query_multiple(
                get_template("sparc2/sql/_drought_data_all_at_admin2_by_month_for_probclass.sql").render({
                    'admin2_popatrisk': table_popatrisk,
                    'prob_min': prob_class["min"],
                    'prob_max': prob_class["max"],
                    'iso_alpha3': iso_alpha3}))
            by_month = values_by_prob_class_by_month.setdefault(prob_class["label"], {})
            for month, value in rows:
                by_month[month] = value

            # Per-admin2 totals by month for this probability class.
            rows = geodash_conn.exec_query_multiple(
                get_template("sparc2/sql/_drought_data_by_admin2_by_month_for_probclass.sql").render({
                    'admin2_popatrisk': table_popatrisk,
                    'prob_min': prob_class["min"],
                    'prob_max': prob_class["max"],
                    'iso_alpha3': iso_alpha3}))
            for admin2_code, month, value in rows:
                by_admin2 = values_by_admin2_by_prob_class_by_month.setdefault(admin2_code, {})
                by_admin2.setdefault(prob_class["label"], {})[month] = value

    # Guard: max() raises on an empty sequence when the country has no
    # drought exposure.
    max_values = int(max(values_float)) if values_float else 0

    summary = {
        'all': {
            "max": {
                'at_country_month': None,
                'at_admin2_month': max_values
            },
            'breakpoints': {
                'natural': natural,
                'natural_adjusted': [0] + natural_adjusted
            }
        },
        "prob_class": {},
        "admin2": {}
    }

    # Monthly series per probability class, zero-filled for missing months.
    for prob_class in prob_classes:
        label = prob_class["label"]
        summary["prob_class"][label] = {
            "by_month": [values_by_prob_class_by_month[label].get(x, 0) for x in MONTHS_SHORT3]
        }

    # Monthly series per admin2 x probability class.
    for admin2_code in values_by_admin2_by_prob_class_by_month:
        for label in values_by_admin2_by_prob_class_by_month[admin2_code]:
            values_by_month = [values_by_admin2_by_prob_class_by_month[admin2_code][label].get(x, 0) for x in MONTHS_SHORT3]
            summary["admin2"] = insertIntoObject(
                summary["admin2"],
                [admin2_code, "prob_class", label, 'by_month'],
                values_by_month)

    summary['header'] = {
        'all_breakpoints_natural': len(summary["all"]["breakpoints"]["natural"]),
        'all_breakpoints_natural_adjusted': len(summary["all"]["breakpoints"]["natural_adjusted"]),
        'admin2': len(summary["admin2"]),
        'prob_classes': prob_classes
    }

    return summary
# Example 2
def get_summary_drought(table_popatrisk=None, iso_alpha3=None):
    """Build the drought population-at-risk summary for one country.

    For each drought probability class, queries the admin2-level totals
    (for natural breakpoints), the country totals by month, and the
    per-admin2 totals by month, then assembles everything into a nested
    summary dict.

    :param table_popatrisk: name of the admin2 population-at-risk table
    :param iso_alpha3: ISO 3166-1 alpha-3 country code
    :raises Exception: if either argument is missing/falsy
    :return: dict with 'all', 'prob_class', 'admin2' and 'header' sections
    """
    if (not table_popatrisk) or (not iso_alpha3):
        raise Exception("Missing table_popatrisk or iso3 for get_summary_drought.")

    prob_classes = [
        {"min": 0.01, "max": 0.05, "label": "0.01-0.05"},
        {"min": 0.06, "max": 0.10, "label": "0.06-0.10"},
        {"min": 0.11, "max": 0.19, "label": "0.11-0.19"},
        {"min": 0.20, "max": 1.00, "label": "0.20-1.0"}
    ]

    values_all = []
    values_by_prob_class = {}
    values_by_prob_class_by_month = {}
    values_by_admin2_by_prob_class_by_month = {}
    with GeoDashDatabaseConnection() as geodash_conn:
        for prob_class in prob_classes:
            values = geodash_conn.exec_query_single_aslist(
                get_template("sparc2/sql/_drought_data_all_at_admin2_for_probclass.sql").render({
                    'admin2_popatrisk': table_popatrisk,
                    'prob_min': prob_class["min"],
                    'prob_max': prob_class["max"],
                    'iso_alpha3': iso_alpha3}))
            values_by_prob_class[prob_class['label']] = values
            # Accumulate in place instead of rebuilding the list per class.
            values_all.extend(values)

        values_float = [float(x) for x in values_all]
        num_breakpoints = 5
        natural = calc_breaks_natural(values_float, num_breakpoints)
        natural_adjusted = natural

        for prob_class in prob_classes:
            # Country totals by month for this probability class.
            rows = geodash_conn.exec_query_multiple(
                get_template("sparc2/sql/_drought_data_all_at_admin2_by_month_for_probclass.sql").render({
                    'admin2_popatrisk': table_popatrisk,
                    'prob_min': prob_class["min"],
                    'prob_max': prob_class["max"],
                    'iso_alpha3': iso_alpha3}))
            by_month = values_by_prob_class_by_month.setdefault(prob_class["label"], {})
            for month, value in rows:
                by_month[month] = value

            # Per-admin2 totals by month for this probability class.
            rows = geodash_conn.exec_query_multiple(
                get_template("sparc2/sql/_drought_data_by_admin2_by_month_for_probclass.sql").render({
                    'admin2_popatrisk': table_popatrisk,
                    'prob_min': prob_class["min"],
                    'prob_max': prob_class["max"],
                    'iso_alpha3': iso_alpha3}))
            for admin2_code, month, value in rows:
                by_admin2 = values_by_admin2_by_prob_class_by_month.setdefault(admin2_code, {})
                by_admin2.setdefault(prob_class["label"], {})[month] = value

    # BUG FIX: max() raises ValueError on an empty sequence; a country with
    # no drought exposure previously crashed here. Fall back to 0 like the
    # sibling drought/cyclone variants do.
    max_values = int(max(values_float)) if values_float else 0

    summary = {
        'all': {
            "max": {
                'at_country_month': None,
                'at_admin2_month': max_values
            },
            'breakpoints': {
                'natural': natural,
                'natural_adjusted': [0] + natural_adjusted
            }
        },
        "prob_class": {},
        "admin2": {}
    }

    # Monthly series per probability class, zero-filled for missing months.
    for prob_class in prob_classes:
        label = prob_class["label"]
        summary["prob_class"][label] = {
            "by_month": [values_by_prob_class_by_month[label].get(x, 0) for x in MONTHS_SHORT3]
        }

    # Monthly series per admin2 x probability class.
    for admin2_code in values_by_admin2_by_prob_class_by_month:
        for label in values_by_admin2_by_prob_class_by_month[admin2_code]:
            values_by_month = [values_by_admin2_by_prob_class_by_month[admin2_code][label].get(x, 0) for x in MONTHS_SHORT3]
            summary["admin2"] = insertIntoObject(
                summary["admin2"],
                [admin2_code, "prob_class", label, 'by_month'],
                values_by_month)

    summary['header'] = {
        'all_breakpoints_natural': len(summary["all"]["breakpoints"]["natural"]),
        'all_breakpoints_natural_adjusted': len(summary["all"]["breakpoints"]["natural_adjusted"]),
        'admin2': len(summary["admin2"]),
        'prob_classes': prob_classes
    }

    return summary
# Example 3
def get_summary_cyclone(table_popatrisk=None, iso_alpha3=None):
    """Return the nested cyclone summary dict for a single country.

    Collects country-wide population-at-risk values (for natural
    breakpoints and the maximum), then for each cyclone probability
    class the monthly country totals and the monthly series per admin2
    unit.
    """
    now = datetime.datetime.now()
    current_month = now.strftime("%b").lower()

    if (not table_popatrisk) or (not iso_alpha3):
        raise Exception(
            "Missing table_popatrisk or iso_alpha3 for get_summary_cyclone.")

    prob_classes = [
        "0.01-0.1", "0.1-0.2", "0.2-0.3", "0.3-0.4", "0.4-0.5", "0.5-0.6",
        "0.6-0.7", "0.7-0.8", "0.8-0.9", "0.9-1.0"
    ]

    with GeoDashDatabaseConnection() as geodash_conn:
        raw_values = geodash_conn.exec_query_single_aslist(
            get_template("sparc2/sql/_cyclone_data_all_at_admin2.sql").render({
                'admin2_popatrisk': table_popatrisk,
                'iso_alpha3': iso_alpha3
            }))
        country_values = [float(v) for v in raw_values]

        breaks = calc_breaks_natural(country_values, 5)
        # An unexposed country yields no rows; report 0 as the maximum.
        peak = max(country_values) if country_values else 0

        summary = {
            'all': {
                "max": {
                    'at_country_month': None,
                    'at_admin2_month': peak
                },
                'breakpoints': {
                    'natural': breaks,
                    'natural_adjusted': [0] + breaks
                }
            },
            "prob_class": {},
            "admin2": {}
        }

        for prob_class in prob_classes:
            # Country totals by month for this probability class.
            monthly = geodash_conn.exec_query_single_aslist(
                get_template(
                    "sparc2/sql/_cyclone_data_by_prob_class_month.sql").render({
                        'admin2_popatrisk': table_popatrisk,
                        'iso_alpha3': iso_alpha3,
                        'prob_class': prob_class
                    }))
            summary["prob_class"][prob_class] = {
                'by_month': [float(v) for v in monthly]
            }

            # Monthly series per admin2, returned as comma-separated text.
            admin2_rows = geodash_conn.exec_query_multiple(
                get_template(
                    "sparc2/sql/_cyclone_data_by_group_prob_class_month.sql").render({
                        'admin2_popatrisk': table_popatrisk,
                        'iso_alpha3': iso_alpha3,
                        'prob_class': prob_class,
                        'group': 'admin2_code'
                    }))
            for admin2_code, csv_row in admin2_rows:
                node = summary["admin2"].setdefault(admin2_code, {"prob_class": {}})
                node.setdefault("prob_class", {})[prob_class] = {
                    'by_month': [float(v) for v in csv_row.split(",")]
                }

    summary['header'] = {
        'all_breakpoints_natural': len(summary["all"]["breakpoints"]["natural"]),
        'all_breakpoints_natural_adjusted': len(summary["all"]["breakpoints"]["natural_adjusted"]),
        'admin2': len(summary["admin2"].keys()),
        'prob_classes': prob_classes
    }

    return summary
# Example 4
def get_summary_flood(table_popatrisk=None, iso_alpha3=None):
    """Build the flood population-at-risk summary for one country.

    Computes natural breakpoints over all values, then per return period
    (25..1000 years) the breakpoints, the country totals by month, and
    the per-admin2 monthly series. The "natural_adjusted" breakpoints
    are widened incrementally as higher return periods are folded in.

    :param table_popatrisk: name of the admin2 population-at-risk table
    :param iso_alpha3: ISO 3166-1 alpha-3 country code
    :raises Exception: if either argument is missing/falsy
    :return: dict with 'all', 'rp', 'admin2' and 'header' sections
    """
    if (not table_popatrisk) or (not iso_alpha3):
        raise Exception("Missing table_popatrisk or iso3 for get_summary_flood.")

    num_breakpoints = 5

    connection = psycopg2.connect(settings.GEODASH_DB_CONN_STR)
    try:
        cursor = connection.cursor()

        values = data_local_country_hazard_all().get(
            cursor=cursor,
            iso_alpha3=iso_alpha3,
            hazard="flood",
            template="sparc2/sql/_hazard_data_all.sql",
            table=table_popatrisk)

        natural = calc_breaks_natural(values, num_breakpoints)

        summary = {
            'all': {
                "max": {
                    'at_country_month': None,
                    'at_admin2_month': None
                },
                'breakpoints': {
                    'natural': natural,
                    'natural_adjusted': None
                }
            },
            "rp": {},
            "admin2": {}
        }

        returnPeriods = [25, 50, 100, 200, 500, 1000]
        values_by_rp = {}
        for rp in returnPeriods:
            q2 = get_template("sparc2/sql/_flood_data_all_at_admin2.sql").render({
                'admin2_popatrisk': table_popatrisk,
                'iso_alpha3': iso_alpha3,
                'rp': rp})
            cursor.execute(q2)
            try:
                values = cursor.fetchone()[0].split(",")
            except (TypeError, IndexError, AttributeError):
                # fetchone() returns None (or a NULL column) when the country
                # is not affected by this disaster, e.g. Afghanistan and cyclones.
                values = []
            values_by_rp[str(rp)] = [float(x) for x in values]

        # Build "natural_adjusted" incrementally: start from the low return
        # period, then add one more breakpoint each time a wider pool pushes
        # the second-highest break upward, otherwise rebase on the wider pool.
        natural_adjusted = []
        values = values_by_rp["25"]
        breakpoints = calc_breaks_natural(values, num_breakpoints - 2)
        natural_adjusted.extend(breakpoints)

        # <= RP 100
        values = values + values_by_rp["50"] + values_by_rp["100"]
        breakpoints = calc_breaks_natural(values, num_breakpoints - 1)
        if breakpoints[-2] > natural_adjusted[-2]:
            natural_adjusted.append(breakpoints[-2])
        else:
            natural_adjusted = breakpoints[:-1]

        # <= RP 1000
        values = values + values_by_rp["200"] + values_by_rp["500"] + values_by_rp["1000"]
        breakpoints = calc_breaks_natural(values, num_breakpoints)
        if breakpoints[-2] > natural_adjusted[-2]:
            natural_adjusted.append(breakpoints[-2])
        else:
            natural_adjusted = breakpoints[:-1]

        # BUG FIX: the original appended max(values) unguarded, which raises
        # ValueError on an empty sequence; reuse the guarded maximum instead.
        max_value = max(values) if values else 0

        summary["all"]["max"]["at_admin2_month"] = max_value
        summary["all"]["breakpoints"]["natural_adjusted"] = [0] + natural_adjusted + [max_value]

        for rp in returnPeriods:
            # Breakpoints by RP
            summary["rp"][str(rp)] = {
                'breakpoints': {
                    'natural': calc_breaks_natural(values_by_rp[str(rp)], num_breakpoints)
                }
            }
            ##########
            # Flood data by RP x month
            q3 = get_template("sparc2/sql/_flood_data_by_rp_month.sql").render({
                'admin2_popatrisk': table_popatrisk,
                'iso3': iso_alpha3,
                'rp': rp})
            cursor.execute(q3)
            try:
                values = cursor.fetchone()[0].split(",")
            except (TypeError, IndexError, AttributeError):
                # No values since not affected by that disaster.
                values = []
            summary["rp"][str(rp)]['by_month'] = [float(x) for x in values]
            ##########
            # Flood data by admin2 x RP x month
            q4 = get_template("sparc2/sql/_flood_data_by_admin2_rp_month.sql").render({
                'admin2_popatrisk': table_popatrisk,
                'iso3': iso_alpha3,
                'rp': rp})
            cursor.execute(q4)
            for admin2_code, csv_values in cursor.fetchall():
                keys = [admin2_code, "rp", str(rp), 'by_month']
                value = [float(x) for x in csv_values.split(",")]
                summary["admin2"] = insertIntoObject(summary["admin2"], keys, value)
    finally:
        # The original leaked the connection; always release it.
        connection.close()

    summary['header'] = {
        'all_breakpoints_natural': len(summary["all"]["breakpoints"]["natural"]),
        'all_breakpoints_natural_adjusted': len(summary["all"]["breakpoints"]["natural_adjusted"]),
        'admin2': len(summary["admin2"]),
        'returnPeriods': returnPeriods
    }

    return summary
# Example 5
def get_summary_cyclone(table_popatrisk=None, iso_alpha3=None):
    """Build the cyclone population-at-risk summary for one country.

    Collects country-wide values (for natural breakpoints and the
    maximum), then per cyclone probability class the monthly country
    totals and the per-admin2 monthly series.

    :param table_popatrisk: name of the admin2 population-at-risk table
    :param iso_alpha3: ISO 3166-1 alpha-3 country code
    :raises Exception: if either argument is missing/falsy
    :return: dict with 'all', 'prob_class', 'admin2' and 'header' sections
    """
    if (not table_popatrisk) or (not iso_alpha3):
        raise Exception("Missing table_popatrisk or iso_alpha3 for get_summary_cyclone.")

    with GeoDashDatabaseConnection() as geodash_conn:

        values = geodash_conn.exec_query_single_aslist(
            get_template("sparc2/sql/_cyclone_data_all_at_admin2.sql").render({
                'admin2_popatrisk': table_popatrisk,
                'iso_alpha3': iso_alpha3}))

        values = [float(x) for x in values]
        num_breakpoints = 5
        natural = calc_breaks_natural(values, num_breakpoints)
        natural_adjusted = natural

        summary = {
            'all': {
                "max": {
                    'at_country_month': None,
                    # BUG FIX: max() raises ValueError on an empty sequence
                    # when the country has no cyclone exposure; the sibling
                    # variant of this function already guards this case.
                    'at_admin2_month': max(values) if values else 0
                },
                'breakpoints': {
                    'natural': natural,
                    'natural_adjusted': [0] + natural_adjusted
                }
            },
            "prob_class": {},
            "admin2": {}
        }

        prob_classes = [
            "0.01-0.1",
            "0.1-0.2",
            "0.2-0.3",
            "0.3-0.4",
            "0.4-0.5",
            "0.5-0.6",
            "0.6-0.7",
            "0.7-0.8",
            "0.8-0.9",
            "0.9-1.0"]

        for prob_class in prob_classes:
            # Country totals by month for this probability class.
            values = geodash_conn.exec_query_single_aslist(
                get_template("sparc2/sql/_cyclone_data_by_prob_class_month.sql").render({
                    'admin2_popatrisk': table_popatrisk,
                    'iso_alpha3': iso_alpha3,
                    'prob_class': prob_class}))

            summary["prob_class"][prob_class] = {
                'by_month': [float(x) for x in values]
            }

            # Monthly series per admin2, returned as comma-separated text.
            rows = geodash_conn.exec_query_multiple(
                get_template("sparc2/sql/_cyclone_data_by_group_prob_class_month.sql").render({
                    'admin2_popatrisk': table_popatrisk,
                    'iso_alpha3': iso_alpha3,
                    'prob_class': prob_class,
                    'group': 'admin2_code'}))

            for admin2_code, csv_values in rows:
                entry = summary["admin2"].setdefault(admin2_code, {"prob_class": {}})
                entry.setdefault("prob_class", {})[prob_class] = {
                    'by_month': [float(x) for x in csv_values.split(",")]
                }

    summary['header'] = {
        'all_breakpoints_natural': len(summary["all"]["breakpoints"]["natural"]),
        'all_breakpoints_natural_adjusted': len(summary["all"]["breakpoints"]["natural_adjusted"]),
        'admin2': len(summary["admin2"]),
        'prob_classes': prob_classes
    }

    return summary
# Example 6
def get_summary_flood(table_popatrisk=None, iso_alpha3=None):
    """Build the flood population-at-risk summary for one country.

    Computes natural breakpoints over all values, then per return period
    (25..1000 years) the breakpoints, the country totals by month, and
    the per-admin2 monthly series. The "natural_adjusted" breakpoints
    are widened incrementally as higher return periods are folded in.

    :param table_popatrisk: name of the admin2 population-at-risk table
    :param iso_alpha3: ISO 3166-1 alpha-3 country code
    :raises Exception: if either argument is missing/falsy
    :return: dict with 'all', 'rp', 'admin2' and 'header' sections
    """
    if (not table_popatrisk) or (not iso_alpha3):
        raise Exception("Missing table_popatrisk or iso3 for get_summary_flood.")

    num_breakpoints = 5

    connection = psycopg2.connect(settings.GEODASH_DB_CONN_STR)
    try:
        cursor = connection.cursor()

        values = data_local_country_hazard_all().get(
            cursor=cursor,
            iso_alpha3=iso_alpha3,
            hazard="flood",
            template="sparc2/sql/_hazard_data_all.sql",
            table=table_popatrisk)

        natural = calc_breaks_natural(values, num_breakpoints)

        summary = {
            'all': {
                "max": {
                    'at_country_month': None,
                    'at_admin2_month': None
                },
                'breakpoints': {
                    'natural': natural,
                    'natural_adjusted': None
                }
            },
            "rp": {},
            "admin2": {}
        }

        returnPeriods = [25, 50, 100, 200, 500, 1000]
        values_by_rp = {}
        for rp in returnPeriods:
            q2 = get_template("sparc2/sql/_flood_data_all_at_admin2.sql").render({
                'admin2_popatrisk': table_popatrisk,
                'iso_alpha3': iso_alpha3,
                'rp': rp})
            cursor.execute(q2)
            try:
                values = cursor.fetchone()[0].split(",")
            except (TypeError, IndexError, AttributeError):
                # fetchone() returns None (or a NULL column) when the country
                # is not affected by this disaster, e.g. Afghanistan and cyclones.
                values = []
            values_by_rp[str(rp)] = [float(x) for x in values]

        # Build "natural_adjusted" incrementally: start from the low return
        # period, then add one more breakpoint each time a wider pool pushes
        # the second-highest break upward, otherwise rebase on the wider pool.
        natural_adjusted = []
        values = values_by_rp["25"]
        breakpoints = calc_breaks_natural(values, num_breakpoints - 2)
        natural_adjusted.extend(breakpoints)

        # <= RP 100
        values = values + values_by_rp["50"] + values_by_rp["100"]
        breakpoints = calc_breaks_natural(values, num_breakpoints - 1)
        if breakpoints[-2] > natural_adjusted[-2]:
            natural_adjusted.append(breakpoints[-2])
        else:
            natural_adjusted = breakpoints[:-1]

        # <= RP 1000
        values = values + values_by_rp["200"] + values_by_rp["500"] + values_by_rp["1000"]
        breakpoints = calc_breaks_natural(values, num_breakpoints)
        if breakpoints[-2] > natural_adjusted[-2]:
            natural_adjusted.append(breakpoints[-2])
        else:
            natural_adjusted = breakpoints[:-1]

        # BUG FIX: max() raises ValueError on an empty sequence; the sibling
        # variant of this function guards this case, this one did not.
        max_value = max(values) if values else 0

        summary["all"]["max"]["at_admin2_month"] = max_value
        summary["all"]["breakpoints"]["natural_adjusted"] = [0] + natural_adjusted + [max_value]

        for rp in returnPeriods:
            # Breakpoints by RP
            summary["rp"][str(rp)] = {
                'breakpoints': {
                    'natural': calc_breaks_natural(values_by_rp[str(rp)], num_breakpoints)
                }
            }
            ##########
            # Flood data by RP x month
            q3 = get_template("sparc2/sql/_flood_data_by_rp_month.sql").render({
                'admin2_popatrisk': table_popatrisk,
                'iso3': iso_alpha3,
                'rp': rp})
            cursor.execute(q3)
            try:
                values = cursor.fetchone()[0].split(",")
            except (TypeError, IndexError, AttributeError):
                # No values since not affected by that disaster.
                values = []
            summary["rp"][str(rp)]['by_month'] = [float(x) for x in values]
            ##########
            # Flood data by admin2 x RP x month
            q4 = get_template("sparc2/sql/_flood_data_by_admin2_rp_month.sql").render({
                'admin2_popatrisk': table_popatrisk,
                'iso3': iso_alpha3,
                'rp': rp})
            cursor.execute(q4)
            for admin2_code, csv_values in cursor.fetchall():
                keys = [admin2_code, "rp", str(rp), 'by_month']
                value = [float(x) for x in csv_values.split(",")]
                summary["admin2"] = insertIntoObject(summary["admin2"], keys, value)
    finally:
        # The original leaked the connection; always release it.
        connection.close()

    summary['header'] = {
        'all_breakpoints_natural': len(summary["all"]["breakpoints"]["natural"]),
        'all_breakpoints_natural_adjusted': len(summary["all"]["breakpoints"]["natural_adjusted"]),
        'admin2': len(summary["admin2"]),
        'returnPeriods': returnPeriods
    }

    return summary
# Example 7
def get_summary_context(table_context=None, iso_alpha3=None):
    """Build the country "context" summary of land-change breakpoints.

    Reads several admin2-level attributes (delta_mean, delta_negative,
    delta_positive, erosion_propensity, delta_crop, delta_forest) and
    computes natural breakpoints for each, splitting signed attributes
    into negative/positive halves around zero.

    :param table_context: name of the admin2 context table
    :param iso_alpha3: ISO 3166-1 alpha-3 country code
    :raises Exception: if either argument is missing/falsy
    :return: dict with min/max and breakpoint lists under 'all'
    """
    if (not table_context) or (not iso_alpha3):
        raise Exception("Missing table_context or iso3 for get_summary_context.")

    connection = psycopg2.connect(settings.GEODASH_DB_CONN_STR)
    try:
        cursor = connection.cursor()

        def fetch_floats(attribute):
            # One attribute value per admin2 unit in the country.
            return [float(x) for x in data_local_country_context_all().get(
                cursor=cursor,
                iso_alpha3=iso_alpha3,
                attribute=attribute,
                template="sparc2/sql/_admin2_data_all.sql",
                table=table_context)]

        def signed_breaks(values):
            # Natural breaks for one sign of a signed attribute, with
            # fallbacks so empty or single-value inputs do not crash
            # calc_breaks_natural.
            if len(values) == 0:
                return [0, 0, 0]
            if len(values) == 1:
                return [values[0], values[0], values[0]]
            return calc_breaks_natural(values, 2)

        values_delta_mean = fetch_floats("delta_mean")
        natural_mean = calc_breaks_natural(values_delta_mean, 6)
        natural_mean_negative = signed_breaks([x for x in values_delta_mean if x <= 0.0])
        natural_mean_positive = signed_breaks([x for x in values_delta_mean if x >= 0.0])

        natural_negative = calc_breaks_natural(fetch_floats("delta_negative"), 3)
        natural_positive = calc_breaks_natural(fetch_floats("delta_positive"), 3)
        natural_erosion_propensity = calc_breaks_natural(fetch_floats("erosion_propensity"), 3)

        values_delta_crop = fetch_floats("delta_crop")
        # Consistency/robustness fix: crop halves are now guarded like the
        # mean and forest halves (the original called calc_breaks_natural
        # directly, without the empty/single-value fallbacks).
        natural_crop_negative = signed_breaks([x for x in values_delta_crop if x <= 0.0])
        natural_crop_positive = signed_breaks([x for x in values_delta_crop if x >= 0.0])

        values_delta_forest = fetch_floats("delta_forest")
        natural_forest_negative = signed_breaks([x for x in values_delta_forest if x <= 0.0])
        natural_forest_positive = signed_breaks([x for x in values_delta_forest if x >= 0.0])
    finally:
        # The original leaked the connection; always release it.
        connection.close()

    return {
        'all': {
            "min": {
                'at_admin2_month': (min(values_delta_mean) if values_delta_mean else None)
            },
            "max": {
                'at_admin2_month': (max(values_delta_mean) if values_delta_mean else None)
            },
            'breakpoints': {
                'natural': natural_mean,
                'natural_adjusted': natural_mean_negative + [0] + natural_mean_positive,
                'natural_negative': natural_negative,
                'natural_positive': natural_positive,
                'natural_erosion_propensity': [0] + natural_erosion_propensity,
                'natural_crop': natural_crop_negative + [0] + natural_crop_positive,
                'natural_forest': natural_forest_negative + [0] + natural_forest_positive
            }
        }
    }
# Example 8
def get_summary_landslide(table_popatrisk=None, iso_alpha3=None):
    now = datetime.datetime.now()
    current_month = now.strftime("%b").lower()

    if (not table_popatrisk) or (not iso_alpha3):
        raise Exception("Missing table_popatrisk or iso3 for get_summary_flood.")

    summary = {
        'all': {
            "max": {
              'at_country_month': None,
              'at_admin2_month': None
            },
            'breakpoints': {
                'natural': None,
                'natural_adjusted': None
            },
            "by_month": None
        },
        "admin2": {}
    }

    num_breakpoints = 5

    connection = psycopg2.connect(settings.GEODASH_DB_CONN_STR)
    cursor = connection.cursor()

    values = data_local_country_hazard_all().get(
        cursor=cursor,
        iso_alpha3=iso_alpha3,
        hazard="landslide",
        template="sparc2/sql/_hazard_data_all.sql",
        table=table_popatrisk)

    natural = calc_breaks_natural(values, num_breakpoints)
    values_as_integer = [int(x) for x in values]
    natural_adjusted = natural

    summary["all"]["max"]["at_admin2_month"] = max(values_as_integer)
    summary["all"]["breakpoints"]["natural"] = natural
    summary["all"]["breakpoints"]["natural_adjusted"] =  [0] + natural_adjusted + [max(values_as_integer)]

    with GeoDashDatabaseConnection() as geodash_conn:
        values = geodash_conn.exec_query_single_aslist(
            get_template("sparc2/sql/_landslide_data_by_month.sql").render({
                'admin2_popatrisk': 'landslide.admin2_popatrisk',
                'iso_alpha3': iso_alpha3}))

        print "values by month: ", values

        summary["all"]["by_month"] = [int(x) for x in values]

        rows = geodash_conn.exec_query_multiple(
            get_template("sparc2/sql/_landslide_data_by_admin2_month_asjson.sql").render({
                'admin2_popatrisk': 'landslide.admin2_popatrisk',
                'iso_alpha3': iso_alpha3}))

        values_by_admin2 = {}
        for row in rows:
            admin2_code, data = row
            data.pop(u"admin2_code")
            summary["admin2"][str(admin2_code)] = {}
            summary["admin2"][str(admin2_code)]["by_month"] = valuesByMonthToList(data)


    return summary
# Exemplo n.º 9 (score: 0)
def _context_breaks_2class(values):
    """Two-class natural breakpoints, safe for degenerate inputs.

    Returns three boundary values: [0, 0, 0] for empty input, a single value
    repeated three times, otherwise the result of calc_breaks_natural(values, 2).
    """
    if not values:
        return [0, 0, 0]
    if len(values) == 1:
        return [values[0]] * 3
    return calc_breaks_natural(values, 2)


def get_summary_context(table_context=None, iso_alpha3=None):
    """Build the admin2 context summary (breakpoints and extremes) for a country.

    Parameters:
        table_context: name of the admin2 context table to query.
        iso_alpha3: ISO 3166-1 alpha-3 country code.

    Returns:
        dict with min/max of delta_mean and natural breakpoints for the
        mean/negative/positive deltas, erosion propensity, crop, and forest
        attributes under "all".

    Raises:
        Exception: when either argument is missing/falsy.
    """
    if (not table_context) or (not iso_alpha3):
        raise Exception(
            "Missing table_context or iso3 for get_summary_context.")

    # Close the raw psycopg2 connection when done; it was previously leaked.
    connection = psycopg2.connect(settings.GEODASH_DB_CONN_STR)
    try:
        cursor = connection.cursor()

        def fetch(attribute):
            # One admin2-level column of context values, coerced to float.
            raw = data_local_country_context_all().get(
                cursor=cursor,
                iso_alpha3=iso_alpha3,
                attribute=attribute,
                template="sparc2/sql/_admin2_data_all.sql",
                table=table_context)
            return [float(x) for x in raw]

        values_delta_mean = fetch("delta_mean")
        values_delta_negative = fetch("delta_negative")
        values_delta_positive = fetch("delta_positive")
        values_erosion_propensity = fetch("erosion_propensity")
        values_delta_crop = fetch("delta_crop")
        values_delta_forest = fetch("delta_forest")
    finally:
        connection.close()

    natural_mean = calc_breaks_natural(values_delta_mean, 6)
    # NOTE: 0.0 values intentionally land in both the negative and positive
    # halves, matching the original <= / >= split.
    natural_mean_negative = _context_breaks_2class(
        [x for x in values_delta_mean if x <= 0.0])
    natural_mean_positive = _context_breaks_2class(
        [x for x in values_delta_mean if x >= 0.0])
    natural_negative = calc_breaks_natural(values_delta_negative, 3)
    natural_positive = calc_breaks_natural(values_delta_positive, 3)
    natural_erosion_propensity = calc_breaks_natural(values_erosion_propensity,
                                                     3)
    # Crop now gets the same degenerate-input guard as mean/forest; previously
    # an all-positive or all-negative crop column hit calc_breaks_natural with
    # an empty list.
    natural_crop_negative = _context_breaks_2class(
        [x for x in values_delta_crop if x <= 0.0])
    natural_crop_positive = _context_breaks_2class(
        [x for x in values_delta_crop if x >= 0.0])
    natural_forest_negative = _context_breaks_2class(
        [x for x in values_delta_forest if x <= 0.0])
    natural_forest_positive = _context_breaks_2class(
        [x for x in values_delta_forest if x >= 0.0])

    summary = {
        'all': {
            "min": {
                'at_admin2_month':
                (min(values_delta_mean) if values_delta_mean else None)
            },
            "max": {
                'at_admin2_month':
                (max(values_delta_mean) if values_delta_mean else None)
            },
            'breakpoints': {
                'natural':
                natural_mean,
                'natural_adjusted':
                natural_mean_negative + [0] + natural_mean_positive,
                'natural_negative':
                natural_negative,
                'natural_positive':
                natural_positive,
                'natural_erosion_propensity': [0] + natural_erosion_propensity,
                'natural_crop':
                natural_crop_negative + [0] + natural_crop_positive,
                'natural_forest':
                natural_forest_negative + [0] + natural_forest_positive
            }
        }
    }

    return summary