Example #1
0
def _make_odata_filter(from_date, to_date, region_id, observer_id, geohazard_tid=None):
    """Build the odata filter part of the request url from what is requested.

    :param from_date:       [date] A query returns [from_date, to_date>
    :param to_date:         [date] A query returns [from_date, to_date>
    :param region_id:       [int] If region_id = None, all regions are selected
    :param observer_id:     [int] If observer_id = None, all observers are selected
    :param geohazard_tid:   [int] 10 is snow, 20,30,40 are dirt, 60 is water and 70 is ice

    :return:                [string] filter part of request url
    """

    # Assemble the filter as a list of clauses and join once at the end.
    clauses = [
        "DtObsTime gt datetime'{0}' and ".format(from_date),
        "DtObsTime lt datetime'{0}' and ".format(to_date),
    ]

    if region_id is not None:
        # Region is queried by name, so map the id through the KDV table first.
        region_name = kdv.get_kdv("ForecastRegionKDV")[region_id].Name
        clauses.append("ForecastRegionName eq '{0}' and ".format(region_name))

    if observer_id is not None:
        clauses.append("ObserverId eq {0} and ".format(observer_id))

    if geohazard_tid is not None:
        clauses.append("GeoHazardTID eq {0} and ".format(geohazard_tid))

    clauses.append("LangKey eq 1")

    return fe.add_norwegian_letters("".join(clauses))
def save_danger_and_problem_to_file(warnings, file_path):
    """Append danger levels and avalanche problems for a list of warnings to a
    tab-separated file, writing a header row first if the file does not exist yet.

    :param warnings:    [list] warning objects with date, region_name, danger_level,
                        danger_level_name and avalanche_problems attributes
    :param file_path:   [string] path of the output file
    :return:            None
    """

    use_encoding = 'utf8'
    #use_encoding = 'latin-1'

    file_exists = os.path.exists(file_path)

    # 'with' guarantees the file is closed even if a write raises.
    with open(file_path, 'a' if file_exists else 'w') as out_file:
        if not file_exists:
            # NOTE(review): 'Faregrad' appears twice in the header while the data row
            # holds danger_level and danger_level_name - the second label presumably
            # should differ. TODO confirm before changing the output format.
            out_file.write('{0}\t{1}\t{2}\t{3}\t{4}\n'
                           .format('Dato', 'Region', 'Faregrad', 'Faregrad', "Svakt lag"))

        for w in warnings:
            date = w.date
            region = fe.add_norwegian_letters(w.region_name, use_encoding=use_encoding)
            danger_level = w.danger_level
            danger_level_name = w.danger_level_name

            for p in w.avalanche_problems:
                problem_combined = fe.add_norwegian_letters(p.problem_combined)

                # Skip rows with no region and the "Hemsedal Skisenter" region.
                if (region != "") and (region != "Hemsedal Skisenter"):
                    s = u'{0}\t{1}\t{2}\t{3}\t{4}\n'.format(
                        date,
                        region,
                        danger_level,
                        danger_level_name,
                        problem_combined)
                    out_file.write(s.encode(use_encoding))
Example #3
0
def get_observed_danger_AvalancheEvaluation3V(region_id, start_date, end_date):
    '''

    :param region_id:
    :param start_date:
    :param end_date:
    :return:
    '''


    region_name = get_forecast_region_name(region_id)
    oDataQuery = "DtObsTime gt datetime'{1}' and " \
                 "DtObsTime lt datetime'{2}' and " \
                 "ForecastRegionName eq '{0}' and " \
                 "LangKey eq 1".format(region_name, start_date, end_date)
    oDataQuery = fe.add_norwegian_letters(oDataQuery)    # Need norwegian letters in the URL

    url = "http://api.nve.no/hydrology/regobs/{0}/Odata.svc/AvalancheEvaluation3V?$filter={1}&$format=json".decode('utf8').format(api_version, oDataQuery)
    AvalancheEvaluation3V = requests.get(url).json()
    avalEval3 = AvalancheEvaluation3V['d']['results']

    print 'getregobs.py -> get_observed_danger_AvalancheEvaluation3V: {0} observations for {1} in from {2} to {3}.'\
        .format(len(avalEval3), region_id, start_date, end_date)

    # if more than 1000 elements are requested, odata truncates data to 1000. We do more requests
    if len(avalEval3) == 1000:
        time_delta = end_date - start_date
        date_in_middle = start_date + time_delta/2
        evaluations = get_observed_danger_AvalancheEvaluation3V(region_id, start_date, date_in_middle) \
               + get_observed_danger_AvalancheEvaluation3V(region_id, date_in_middle, end_date)
    else:

        evaluations = []

        if len(avalEval3) != 0:
            for e in avalEval3:
                date = unix_time_2_normal(int(e['DtObsTime'][6:-2]))
                danger_level = e['AvalancheDangerTID']
                danger_level_name = e['AvalancheDangerName']
                nick = e['NickName']
                forecast_correct = e['ForecastCorrectName']
                forecast_correct_id = e['ForecastCorrectTID']
                eval = gd.AvalancheDanger(region_id, region_name, "AvalancheEvaluation3V", date, danger_level, danger_level_name)
                eval.set_nick(nick)
                eval.set_source('Observasjon')
                eval.set_forecast_correct(forecast_correct, forecast_correct_id)
                evaluations.append(eval)

    # sort list by date
    #evaluations = sorted(evaluations, key=lambda AvalancheEvaluation: AvalancheEvaluation.date)

    return evaluations
Example #4
0
def _sorted_count_string(counts, use_encoding):
    """Render a {value: occurrences} dict as "value (count), ..." sorted by count, descending."""
    ordered = sorted(counts.items(), key=operator.itemgetter(1), reverse=True)
    joined = ", ".join("{!s} ({!r})".format(key, val) for (key, val) in ordered)
    return fe.add_norwegian_letters(joined, use_encoding=use_encoding)


def save_main_messages_to_file(main_messages, file_path):
    """Append main-message statistics to a tab-separated file, writing a header row
    first if the file does not exist yet.

    :param main_messages:   [list] main message objects with occurrences, danger_levels,
                            main_causes, cause_names, aval_types, main_message_no and
                            main_message_en attributes
    :param file_path:       [string] path of the output file
    :return:                None
    """

    use_encoding = "utf8"
    # use_encoding = 'latin-1'

    file_exists = os.path.exists(file_path)

    # 'with' guarantees the file is closed even if a write raises.
    with open(file_path, "a" if file_exists else "w") as out_file:
        if not file_exists:
            out_file.write(
                "{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\n".format(
                    "Antall",
                    "Faregrad",
                    "Hovedskredproblem",
                    "Skredproblem",
                    "Skredtype",
                    "Hovedbudskap norsk",
                    "Hovedbudskap engelsk",
                )
            )

        for m in main_messages:
            # Sort each count dict by usage and render to a single string.
            danger_levels = _sorted_count_string(m.danger_levels, use_encoding)
            main_causes = _sorted_count_string(m.main_causes, use_encoding)
            cause_names = _sorted_count_string(m.cause_names, use_encoding)
            aval_types = _sorted_count_string(m.aval_types, use_encoding)

            main_message_no = fe.add_norwegian_letters(m.main_message_no, use_encoding=use_encoding)
            main_message_en = fe.add_norwegian_letters(m.main_message_en, use_encoding=use_encoding)

            s = u"{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\n".format(
                m.occurrences, danger_levels, main_causes, cause_names, aval_types, main_message_no, main_message_en
            )
            out_file.write(s.encode(use_encoding))
def save_danger_levels_to_file(warnings, file_path):
    """Append danger levels for a list of warnings to a tab-separated file,
    writing a header row first if the file does not exist yet.

    :param warnings:    [list] warning objects with date, region_name, danger_level
                        and danger_level_name attributes
    :param file_path:   [string] path of the output file
    :return:            None
    """

    use_encoding = 'utf8'
    #use_encoding = 'latin-1'

    file_exists = os.path.exists(file_path)

    # 'with' guarantees the file is closed even if a write raises.
    with open(file_path, 'a' if file_exists else 'w') as out_file:
        if not file_exists:
            # NOTE(review): 'Faregrad' appears twice in the header while the data row
            # holds danger_level and danger_level_name - the second label presumably
            # should differ. TODO confirm before changing the output format.
            out_file.write('{0}\t{1}\t{2}\t{3}\n'
                           .format('Dato', 'Region', 'Faregrad', 'Faregrad'))

        for w in warnings:
            date = w.date
            region = fe.add_norwegian_letters(w.region_name, use_encoding=use_encoding)
            danger_level = w.danger_level
            danger_level_name = w.danger_level_name

            # Skip rows with no region and the "Hemsedal Skisenter" region.
            if (region != "") and (region != "Hemsedal Skisenter"):
                s = u'{0}\t{1}\t{2}\t{3}\n'.format(
                    date,
                    region,
                    danger_level,
                    danger_level_name)
                out_file.write(s.encode(use_encoding))
Example #6
0
def get_trip(from_date, to_date, geohazard_tid=None, output='List'):
    """
    :param from_date:       [date] A query returns [from_date, to_date>
    :param to_date:         [date] A query returns [from_date, to_date>
    :param geohazard_tid:   [int] 10 is snow, 20,30,40 are dirt, 60 is water and 70 is ice

    :return:

    <entry>
        <id>http://api.nve.no/hydrology/RegObs/v0.9.9/OData.svc/Trip(1)</id>
        <category term="RegObsModel.Trip" scheme="http://schemas.microsoft.com/ado/2007/08/dataservices/scheme" />
        <link rel="edit" title="Trip" href="Trip(1)" /><link rel="http://schemas.microsoft.com/ado/2007/08/dataservices/related/Observer" type="application/atom+xml;type=entry" title="Observer" href="Trip(1)/Observer" />
        <link rel="http://schemas.microsoft.com/ado/2007/08/dataservices/related/ObsLocation" type="application/atom+xml;type=entry" title="ObsLocation" href="Trip(1)/ObsLocation" />
        <title />
        <updated>2015-12-30T20:09:16Z</updated>
        <author>
            <name />
        </author>
        <content type="application/xml">
            <m:properties>
                <d:TripID m:type="Edm.Int32">1</d:TripID>
                <d:ObserverID m:type="Edm.Int32">1077</d:ObserverID>
                <d:ObsLocationID m:type="Edm.Int32">19063</d:ObsLocationID>
                <d:GeoHazardTID m:type="Edm.Int16">10</d:GeoHazardTID>
                <d:TripTypeTID m:type="Edm.Int32">20</d:TripTypeTID>
                <d:ObservationExpectedTime m:type="Edm.DateTime">2015-01-09T11:00:00</d:ObservationExpectedTime>
                <d:Comment></d:Comment>
                <d:IsFinished m:type="Edm.Boolean">true</d:IsFinished>
                <d:TripRegistrationTime m:type="Edm.DateTime">2015-01-09T09:11:59.263</d:TripRegistrationTime>
                <d:TripFinishedTime m:type="Edm.DateTime">2015-01-09T09:18:36.653</d:TripFinishedTime>
                <d:DeviceID m:type="Edm.Guid">835f5e39-a73a-48d3-2c7f-3c81c0492b87</d:DeviceID>
            </m:properties>
        </content>
    </entry>

    """

    odata_filter = ""

    if geohazard_tid is not None:
        odata_filter += "GeoHazardTID eq {0} and ".format(geohazard_tid)

    odata_filter += "TripRegistrationTime gt datetime'{0}' and TripRegistrationTime lt datetime'{1}'".format(from_date, to_date)


    odata_filter = fe.add_norwegian_letters(odata_filter)

    url = "http://api.nve.no/hydrology/regobs/{0}/Odata.svc/Trip/?$filter={1}&$format=json"\
        .decode('utf8').format(env.api_version, odata_filter)

    print "getmisc.py -> get_trip: ..to {0}".format(fe.remove_norwegian_letters(url))

    result = requests.get(url).json()
    data = result['d']['results']

    # if more than 1000 elements are requested, odata truncates data to 1000. We do more requests
    if len(result) == 1000:
        time_delta = to_date - from_date
        date_in_middle = from_date + time_delta/2
        data_out = get_trip(from_date, date_in_middle, geohazard_tid) + get_trip(date_in_middle, to_date, geohazard_tid)
    else:
        data_out = [Trip(d) for d in data]

    if output == 'List':
        return data_out
    elif output == 'csv':
        with open('{0}trips {1}-{2}.csv'.format(env.output_folder, from_date.strftime('%Y%m%d'), to_date.strftime('%Y%m%d')), 'wb') as f:
            w = csv.DictWriter(f, data_out[0].__dict__.keys(), delimiter=";")
            w.writeheader()
            for t in data_out:
                w.writerow(t.__dict__)
        return
Example #7
0
def get_problems_from_AvalancheProblemV(region_id, start_date, end_date):
    '''
    Used for observations from 2012-01-01 upp until 2012-12-02
    Elrapp used the view longer but added only "Ikke gitt"

    :param region_name:
    :param region_id:
    :param start_date:
    :param end_date:
    :return:


    Example of data:
    {
        __metadata: {
            id: "http://api.nve.no/hydrology/RegObs/v0.9.9/OData.svc/AvalancheProblemV(DtObsTime=datetime'2012-04-23T14:00:00',LangKey=1,NickName='H%C3%A5vardT%40met',RegID=2681,UTMEast=655106,UTMNorth=7763208,UTMZone=33)",
            uri: "http://api.nve.no/hydrology/RegObs/v0.9.9/OData.svc/AvalancheProblemV(DtObsTime=datetime'2012-04-23T14:00:00',LangKey=1,NickName='H%C3%A5vardT%40met',RegID=2681,UTMEast=655106,UTMNorth=7763208,UTMZone=33)",
            type: "RegObsModel.AvalancheProblemV"
        },
        RegID: 2681,
        DtObsTime: "/Date(1335189600000)/",
        DtRegTime: "/Date(1335186705637)/",
        LocationName: "Tromsø",
        UTMZone: 33,
        UTMEast: 655106,
        UTMNorth: 7763208,
        ForecastRegionName: "Tromsø",
        MunicipalName: "INGEN KOMMUNE",
        NickName: "HåvardT@met",
        CompetenceLevelName: "Ukjent",
        AvalancheProblemTID1: 2,
        AvalancheProblemTID2: 10,
        AvalancheProblemTID3: 0,
        AvalancheProblemName1: "Fokksnø",
        AvalancheProblemName2: "Solpåvirkning",
        AvalancheProblemName3: "Ikke gitt",
        LangKey: 1
    }
    '''

    region_name = get_forecast_region_name(region_id)
    view = "AvalancheProblemV"
    odata_query = "DtObsTime gt datetime'{1}' and " \
                 "DtObsTime lt datetime'{2}' and " \
                 "ForecastRegionName eq '{0}' and " \
                 "LangKey eq 1".format(region_name, start_date, end_date)
    odata_query = fe.add_norwegian_letters(odata_query)

    url = "http://api.nve.no/hydrology/regobs/{0}/Odata.svc/{1}?$filter={2}&$format=json".decode('utf8').format(
        api_version, view, odata_query)

    result = requests.get(url).json()
    result = result['d']['results']

    print 'getregobs.py -> get_problems_from_AvalancheProblemV: {0} observations for {1} in from {2} to {3}.'\
        .format(len(result), region_id, start_date, end_date)

    # if more than 1000 elements are requested, odata truncates data to 1000. We do more requests
    if len(result) == 1000:
        time_delta = end_date - start_date
        date_in_middle = start_date + time_delta/2
        problems = get_problems_from_AvalancheProblemV(region_id, start_date, date_in_middle) \
               + get_problems_from_AvalancheProblemV(region_id, date_in_middle, end_date)
    else:
        problems = []
        if len(result) != 0:
            for p in result:

                date = unix_time_2_normal(int(p['DtObsTime'][6:-2])).date()   # DtObsTime and data on Day0 gives best data
                cause_name1 = p["AvalancheProblemName1"]
                cause_name2 = p["AvalancheProblemName2"]
                cause_name3 = p["AvalancheProblemName3"]
                source = "Observasjon"

                if cause_name1 != "Ikke gitt":
                    prob = gp.AvalancheProblem(region_id, region_name, date, 0, cause_name1, source)
                    prob.set_regobs_view(view)
                    prob.set_regid(p['RegID'])
                    prob.set_url("{0}{1}".format(registration_basestring, prob.regid))
                    problems.append(prob)

                if cause_name2 != "Ikke gitt":
                    prob = gp.AvalancheProblem(region_id, region_name, date, 1, cause_name2, source)
                    prob.set_regobs_view(view)
                    prob.set_regid(p['RegID'])
                    prob.set_url("{0}{1}".format(registration_basestring, prob.regid))
                    problems.append(prob)

                if cause_name3 != "Ikke gitt":
                    prob = gp.AvalancheProblem(region_id, region_name, date, 2, cause_name3, source)
                    prob.set_regobs_view(view)
                    prob.set_regid(p['RegID'])
                    prob.set_url("{0}{1}".format(registration_basestring, prob.regid))
                    problems.append(prob)

    return problems
Example #8
0
def get_problems_from_AvalancheWarnProblemV(region_id, start_date, end_date):
    '''AvalancheWarnProblemV used from 2012-11-15 to today. It selects only problems linked to published
    warnings.

    There was made changes to the view in des 2013 which I think affected destructive size and avalanche cause.
    Changes were made to the data model before startup in december 2014. Changes involve use of cause_name
    and avalanche size.

    Notes to do:
    * aval_type distingushes if aval type is dry or wet. Varsom does not this any more.
    * lots of typos in KDV_names. Inconsistent capitalization, blankspace at end of line.
    * exposed terain not included
    * no mapping to the manin categroies of avalanche problems.

    :param region_name:
    :param region_id:
    :param start_date:
    :param end_date:
    :return:

    {
        __metadata: {
            id: "http://api.nve.no/hydrology/RegObs/v0.9.9/OData.svc/AvalancheWarnProblemV(AvalancheWarnProblemID=1,AvalProbabilityName='Mulig%20',DtObsTime=datetime'2015-01-08T11%3A43%3A24.593',LangKey=1,NickName='Silje%40svv',ObsLocationID=39,RegID=45272,TMNorth=7763208,UTMEast=655106,UTMZone=33)",
            uri: "http://api.nve.no/hydrology/RegObs/v0.9.9/OData.svc/AvalancheWarnProblemV(AvalancheWarnProblemID=1,AvalProbabilityName='Mulig%20',DtObsTime=datetime'2015-01-08T11%3A43%3A24.593',LangKey=1,NickName='Silje%40svv',ObsLocationID=39,RegID=45272,TMNorth=7763208,UTMEast=655106,UTMZone=33)",
            type: "RegObsModel.AvalancheWarnProblemV"
        },
        RegID: 45272,
        AvalancheWarnProblemID: 1,
        DtObsTime: "/Date(1420717404593)/",
        DtRegTime: "/Date(1420717404320)/",
        ObsLocationID: 39,
        LocationName: "Tromsø",
        UTMZone: 33,
        UTMEast: 655106,
        TMNorth: 7763208,
        ForecastRegionTID: 108,
        ForecastRegionName: "Tromsø",
        MunicipalName: "INGEN KOMMUNE",
        NickName: "Silje@svv",
        ObserverGroupID: 1,
        CompetenceLevelTID: 120,
        CompetenceLevelName: "***",
        AvalProbabilityTID: 3,
        AvalProbabilityName: "Mulig ",
        AvalTriggerSimpleTID: 10,
        AvalTriggerSimpleName: "Stor tilleggsbelastning ",
        DestructiveSizeExtTID: 0,
        DestructiveSizeExtName: "Ikke gitt",
        AvalancheExtTID: 20,
        AvalancheExtName: "Tørre flakskred",
        AvalCauseTID: 15,
        AvalCauseName: "Dårlig binding mellom lag i fokksnøen",
        AvalCauseExtTID: 0,
        AvalCauseExtName: "ikke gitt ",
        AvalReleaseHeightTID: 0,
        AvalReleaseHeighName: "Ikke gitt ",
        ProbabilityCombined: null,
        CauseCombined: null,
        ReleaseHeightCombined: "",
        AvalancheProblemCombined: "  ",
        Comment: null,
        LangKey: 1,
        ValidExposistion: "11111111",
        ExposedHeight1: 400,
        ExposedHeight2: 0,
        ExposedHeightComboTID: 1,
        DestructiveSizeTID: 1,
        DestructiveSizeName: "1 - Harmløst",
        SortOrder: 1,
        AvalPropagationTID: 1,
        AvalPropagationName: "Isolerte faresoner",
        AvalWeakLayerId: null,
        AdviceText: "Se etter områder hvor vinden nylig har lagt fra seg fokksnø, typisk bak rygger, i renneformasjoner og søkk. Lokale vindeffekter og skiftende vindretning kan gi stor variasjon i hvor fokksnøen legger seg. Snø som sprekker opp rundt skiene/brettet er et typisk tegn. Unngå områder med fokksnø til den har fått stabilisert seg. Det er størst sannsynlighet for å løse ut skred på kul-formasjoner i terrenget og der fokksnøen er myk."
    }


    '''
    region_name = get_forecast_region_name(region_id)
    aval_cause_kdv = gkdv.get_kdv('AvalCauseKDV')
    view = "AvalancheWarnProblemV"

    # Note this view queries on LocationName and not on ForeCastRegionName as the other views
    odata_query = "DtObsTime gt datetime'{1}' and " \
             "DtObsTime lt datetime'{2}' and " \
             "LocationName eq '{0}' and " \
             "LangKey eq 1".format(region_name, start_date, end_date)
    odata_query = fe.add_norwegian_letters(odata_query)

    url = "http://api.nve.no/hydrology/regobs/{0}/Odata.svc/{1}?$filter={2}&$format=json".decode('utf8').format(
        api_version, view, odata_query)
    result = requests.get(url).json()
    try:
        result = result['d']['results']
    except:
        result = []

    print 'getregobs.py -> get_problems_from_AvalancheWarnProblemV: {0} observations for {1} in from {2} to {3}.'\
        .format(len(result), region_id, start_date, end_date)

    # if more than 1000 elements are requested, odata truncates data to 1000. We do more requests
    if len(result) == 1000:
        time_delta = end_date - start_date
        date_in_middle = start_date + time_delta/2
        problems = get_problems_from_AvalancheWarnProblemV(region_id, start_date, date_in_middle) \
               + get_problems_from_AvalancheWarnProblemV(region_id, date_in_middle, end_date)
    else:
        valid_regids = fa.get_valid_regids(region_id-100, start_date, end_date)
        problems = []
        if len(result) != 0:
            for p in result:
                regid = p["RegID"]

                if regid in valid_regids:

                    date = datetime.datetime.strptime(valid_regids[regid][0:10], '%Y-%m-%d').date()
                    source = "Varsel"

                    aval_cause_tid = int(p['AvalCauseTID']) + int(p['AvalCauseExtTID'])
                    cause_name = p["CauseCombined"]
                    aval_size = fe.remove_norwegian_letters(p['DestructiveSizeExtName'])
                    aval_type = p['AvalancheExtName']
                    aval_trigger = fe.remove_norwegian_letters(p['AvalTriggerSimpleName'])
                    aval_probability = fe.remove_norwegian_letters(p['AvalProbabilityName'])
                    aval_distribution = fe.remove_norwegian_letters(p['AvalPropagationName'])
                    aval_cause_combined = p['AvalancheProblemCombined']
                    problem_url = 'http://api.nve.no/hydrology/regobs/{0}/Odata.svc/{1}?$filter=RegID eq {2} and LangKey eq 1&$format=json'.format(api_version, view, regid)

                    # from late november 2013 there was a change in data model
                    if date > datetime.datetime.strptime('2013-11-15', '%Y-%m-%d').date():
                        aval_cause_tid = int(p['AvalCauseTID'])
                        cause_name = aval_cause_kdv[aval_cause_tid].Name
                        aval_size = fe.remove_norwegian_letters(p['DestructiveSizeName'])
                        aval_probability = fe.remove_norwegian_letters(p['AvalProbabilityName'])
                        aval_distribution = fe.remove_norwegian_letters(p['AvalPropagationName'])
                        aval_cause_combined = p['AvalCauseName']

                        # http://www.varsom.no/Snoskred/Senja/?date=18.03.2015
                        varsom_name = region_name.replace('æ','a').replace('ø','o').replace('å','a')
                        varsom_date = date.strftime("%d.%m.%Y")
                        problem_url = "http://www.varsom.no/Snoskred/{0}/?date={1}".format(varsom_name, varsom_date)

                    if cause_name is not None and aval_cause_tid != 0:
                        order = int(p["AvalancheWarnProblemID"])
                        prob = gp.AvalancheProblem(region_id, region_name, date, order, cause_name, source)
                        prob.set_aval_type(aval_type)
                        prob.set_aval_size(aval_size)
                        prob.set_aval_trigger(aval_trigger)
                        prob.set_aval_distribution(aval_distribution)
                        prob.set_aval_probability(aval_probability)
                        prob.set_problem_combined(aval_cause_combined)
                        prob.set_regobs_view(view)
                        prob.set_url(problem_url)
                        prob.set_cause_tid(aval_cause_tid)
                        prob.set_main_cause(cause_name)
                        problems.append(prob)

    return problems
Example #9
0
def get_problems_from_AvalancheWarningV(region_id, start_date, end_date):
    '''

    :param region_name:
    :param region_id:
    :param start_date:
    :param end_date:
    :return:

    Used for warnings 2012-01 to 2012-07

    results: [
        {
        __metadata: {
        id: "http://api.nve.no/hydrology/RegObs/v0.9.8/OData.svc/AvalancheWarningV(AvalancheEvaluation='Det%20har%20bl%C3%A5st%20nordlig%20stiv%20kuling%20i%20fjellet%20siste%20d%C3%B8gn.%205-10%20cm%20nysn%C3%B8%20siste%20to%20d%C3%B8gn.%0D%0AI%20begynnelsen%20av%20uken%20ble%20det%20observert%20mindre%20skred%20p%C3%A5%20grunn%20av%20solinnstr%C3%A5ling.%20Observasjoner%20viser%20at%20nysn%C3%B8en%20fra%20siste%20nedb%C3%B8rperiode%20n%C3%A5%20har%20festet%20seg%20bra%20til%20det%20gamle%20sn%C3%B8dekket%2C%20spesielt%20i%20S%C3%98-vendte%20fjellsider.%20Flakene%20krever%20dog%20fortsatt%20oppmerksomhet.%20Det%20er%20observert%20rimkrystaller%20i%20h%C3%B8yden%20i%20fjellsider%20med%20skygge%2C%20men%20det%20er%20usikkert%20hvorvidt%20disse%20fortsatt%20er%20intakte.%20',AvalancheWarning='Det%20ventes%20%C3%A5%20dannes%20et%20polart%20lavtrykk%20i%20havet%20utenfor%20Troms%20natt%20til%20fredag.%20Plasseringen%20av%20dette%20er%20enn%C3%A5%20usikker%2C%20men%20der%20det%20treffer%20land%20vil%20det%20bli%20kortvarig%20mye%20vind%20fra%20nord%20og%20kraftige%20sn%C3%B8-%20og%20haglbyger.%20Vinden%20ventes%20%C3%A5%20komme%20opp%20i%20stiv%20kuling.%0D%0ANedb%C3%B8r%20som%20torsdag%20og%20fredag%20kommer%20med%20vind%20fra%20N%20og%20NV%20kan%20danne%20flak%20i%20leheng.%20Disse%20vil%20trolig%20kunne%20l%C3%B8ses%20ut%20med%20liten%20tilleggsbelastning%20i%20bratt%20terreng.%20I%20fjellsider%20der%20rimkrystaller%20var%20intakte%20f%C3%B8r%20de%20sn%C3%B8dde%20ned%2C%20typiske%20i%20sider%20med%20skygge%2C%20vil%20det%20v%C3%A6re%20ustabile%20forhold%20dersom%20det%20legger%20seg%20flak%20av%20betydning.%20L%C3%B8rdag%20minking%20til%20skiftende%20bris%20og%20mye%20pent%20v%C3%A6r.%20L%C3%B8rdag%20vil%20det%20kunne%20l%C3%B8ses%20ut%20l%C3%B8ssn%C3%B8skred%20i%20bratte%20soleksponerte%20fjellsider%20p%C3%A5%20grunn%20av%20solinnstr%C3%A5ling.%20Nedb%C3%B8rvarslet%20for%20fredag%20er%20usikkert%20og%20dersom%20det%20ikke%20kommer%20varslet%20nedb%C3%B8rmengde%20vil%
20faregraden%20fredag%20og%20l%C3%B8rdag%20v%C3%A6re%202-moderat.%20',Day0AvalDangerTID=2,DtNextWarningTime=datetime'2012-04-16T16:00:00',DtValidToTime=datetime'2012-04-14T00:00:00',LangKey=1,NickName='Kalle%40NGI',ObserverGroupName='Sn%C3%B8skredvarslingen',ObsLocationID=39,RegID=2472,UTMEast=655106,UTMNorth=7763208,UTMZone=33)",
        uri: "http://api.nve.no/hydrology/RegObs/v0.9.8/OData.svc/AvalancheWarningV(AvalancheEvaluation='Det%20har%20bl%C3%A5st%20nordlig%20stiv%20kuling%20i%20fjellet%20siste%20d%C3%B8gn.%205-10%20cm%20nysn%C3%B8%20siste%20to%20d%C3%B8gn.%0D%0AI%20begynnelsen%20av%20uken%20ble%20det%20observert%20mindre%20skred%20p%C3%A5%20grunn%20av%20solinnstr%C3%A5ling.%20Observasjoner%20viser%20at%20nysn%C3%B8en%20fra%20siste%20nedb%C3%B8rperiode%20n%C3%A5%20har%20festet%20seg%20bra%20til%20det%20gamle%20sn%C3%B8dekket%2C%20spesielt%20i%20S%C3%98-vendte%20fjellsider.%20Flakene%20krever%20dog%20fortsatt%20oppmerksomhet.%20Det%20er%20observert%20rimkrystaller%20i%20h%C3%B8yden%20i%20fjellsider%20med%20skygge%2C%20men%20det%20er%20usikkert%20hvorvidt%20disse%20fortsatt%20er%20intakte.%20',AvalancheWarning='Det%20ventes%20%C3%A5%20dannes%20et%20polart%20lavtrykk%20i%20havet%20utenfor%20Troms%20natt%20til%20fredag.%20Plasseringen%20av%20dette%20er%20enn%C3%A5%20usikker%2C%20men%20der%20det%20treffer%20land%20vil%20det%20bli%20kortvarig%20mye%20vind%20fra%20nord%20og%20kraftige%20sn%C3%B8-%20og%20haglbyger.%20Vinden%20ventes%20%C3%A5%20komme%20opp%20i%20stiv%20kuling.%0D%0ANedb%C3%B8r%20som%20torsdag%20og%20fredag%20kommer%20med%20vind%20fra%20N%20og%20NV%20kan%20danne%20flak%20i%20leheng.%20Disse%20vil%20trolig%20kunne%20l%C3%B8ses%20ut%20med%20liten%20tilleggsbelastning%20i%20bratt%20terreng.%20I%20fjellsider%20der%20rimkrystaller%20var%20intakte%20f%C3%B8r%20de%20sn%C3%B8dde%20ned%2C%20typiske%20i%20sider%20med%20skygge%2C%20vil%20det%20v%C3%A6re%20ustabile%20forhold%20dersom%20det%20legger%20seg%20flak%20av%20betydning.%20L%C3%B8rdag%20minking%20til%20skiftende%20bris%20og%20mye%20pent%20v%C3%A6r.%20L%C3%B8rdag%20vil%20det%20kunne%20l%C3%B8ses%20ut%20l%C3%B8ssn%C3%B8skred%20i%20bratte%20soleksponerte%20fjellsider%20p%C3%A5%20grunn%20av%20solinnstr%C3%A5ling.%20Nedb%C3%B8rvarslet%20for%20fredag%20er%20usikkert%20og%20dersom%20det%20ikke%20kommer%20varslet%20nedb%C3%B8rmengde%20vil
%20faregraden%20fredag%20og%20l%C3%B8rdag%20v%C3%A6re%202-moderat.%20',Day0AvalDangerTID=2,DtNextWarningTime=datetime'2012-04-16T16:00:00',DtValidToTime=datetime'2012-04-14T00:00:00',LangKey=1,NickName='Kalle%40NGI',ObserverGroupName='Sn%C3%B8skredvarslingen',ObsLocationID=39,RegID=2472,UTMEast=655106,UTMNorth=7763208,UTMZone=33)",
        type: "RegObsModel.AvalancheWarningV"
        },
        RegID: 2472,
        DtObsTime: "/Date(1334246400000)/",
        DtRegTime: "/Date(1334246249110)/",
        ObsLocationID: 39,
        LocationName: "Tromsø",
        UTMZone: 33,
        UTMEast: 655106,
        UTMNorth: 7763208,
        ForecastRegionName: "Tromsø",
        MunicipalName: "INGEN KOMMUNE",
        NickName: "Kalle@NGI",
        ObserverGroupName: "Snøskredvarslingen",
        ObserverGroupDescription: "Snøskredvarslingen i Norge utgir regionale varsler for snøskredfare. Varslene brukes på eget ansvar og ikke til kritiske avgjørelser alene. Snøskredvarslingen er et samarbeid mellom NVE, met.no, Statens vegvesen og Jernbaneverket.",
        CompetenceLevelName: "Ukjent",
        Day0AvalDangerTID: 2,
        Day0AvalDangerName: "2 Moderat",
        Day0ValidExposition: "11011100",
        Day0ValidHeightRelative: "001",
        Day1ValidExposition: "11011100",
        Day1ValidHeightRelative: "001",
        Day2ValidExposition: "11011100",
        Day2ValidHeightRelative: "001",
        Day0AvalProblemTID1: 2,
        Day0AvalProblemName1: "Fokksnø",
        Day0AvalProblemName2: "Ustabile lag i dekket",
        Day0AvalProblemName3: "Ikke gitt",
        Day1AvalProblemName1: "Fokksnø",
        Day1AvalProblemName2: "Ustabile lag i dekket",
        Day1AvalProblemName3: "Ikke gitt",
        Day2AvalProblemName1: "Fokksnø",
        Day2AvalProblemName2: "Solpåvirkning",
        Day2AvalProblemName3: "Ustabile lag i dekket",
        Day1AvalDangerTID: 3,
        Day1AvalDangerName: "3 Betydelig",
        Day2AvalDangerTID: 3,
        Day2AvalDangerName: "3 Betydelig",
        AvalancheWarning: "Det ventes å dannes et polart lavtrykk i havet utenfor Troms natt til fredag. Plasseringen av dette er ennå usikker, men der det treffer land vil det bli kortvarig mye vind fra nord og kraftige snø- og haglbyger. Vinden ventes å komme opp i stiv kuling.
        Nedbør som torsdag og fredag kommer med vind fra N og NV kan danne flak i leheng. Disse vil trolig kunne løses ut med liten tilleggsbelastning i bratt terreng. I fjellsider der rimkrystaller var intakte før de snødde ned, typiske i sider med skygge, vil det være ustabile forhold dersom det legger seg flak av betydning. Lørdag minking til skiftende bris og mye pent vær. Lørdag vil det kunne løses ut løssnøskred i bratte soleksponerte fjellsider på grunn av solinnstråling. Nedbørvarslet for fredag er usikkert og dersom det ikke kommer varslet nedbørmengde vil faregraden fredag og lørdag være 2-moderat. ",
        AvalancheEvaluation: "Det har blåst nordlig stiv kuling i fjellet siste døgn. 5-10 cm nysnø siste to døgn.
        I begynnelsen av uken ble det observert mindre skred på grunn av solinnstråling. Observasjoner viser at nysnøen fra siste nedbørperiode nå har festet seg bra til det gamle snødekket, spesielt i SØ-vendte fjellsider. Flakene krever dog fortsatt oppmerksomhet. Det er observert rimkrystaller i høyden i fjellsider med skygge, men det er usikkert hvorvidt disse fortsatt er intakte. ",
        Comment: null,
        DtNextWarningTime: "/Date(1334592000000)/",
        DtValidToTime: "/Date(1334361600000)/",
        LangKey: 1
        }
        ]
        '''

    region_name = get_forecast_region_name(region_id)
    view = "AvalancheWarningV"
    odata_query = "DtObsTime gt datetime'{1}' and " \
                 "DtObsTime lt datetime'{2}' and " \
                 "ForecastRegionName eq '{0}' and " \
                 "LangKey eq 1".format(region_name, start_date, end_date)
    odata_query = fe.add_norwegian_letters(odata_query)

    url = "http://api.nve.no/hydrology/regobs/{0}/Odata.svc/{1}?$filter={2}&$format=json".decode('utf8').format(
        api_version, view, odata_query)
    result = requests.get(url).json()
    try:
        result = result['d']['results']
    except:
        result = []

    print 'getregobs.py -> get_problems_from_AvalancheWarningV: {0} observations for {1} in from {2} to {3}.'\
        .format(len(result), region_id, start_date, end_date)

    # if more than 1000 elements are requested, odata truncates data to 1000. We do more requests
    if len(result) == 1000:
        time_delta = end_date - start_date
        date_in_middle = start_date + time_delta/2
        problems = get_problems_from_AvalancheWarningV(region_id, start_date, date_in_middle) \
               + get_problems_from_AvalancheWarningV(region_id, date_in_middle, end_date)
    else:
        problems = []
        if len(result) != 0:
            for p in result:

                date = unix_time_2_normal(int(p['DtObsTime'][6:-2])).date()   # DtObsTime and data on Day0 gives best data
                cause_name1 = p["Day0AvalProblemName1"]
                cause_name2 = p["Day0AvalProblemName2"]
                cause_name3 = p["Day0AvalProblemName3"]
                source = "Varsel"

                # http://api.nve.no/hydrology/regobs/v0.9.8/Odata.svc/AvalancheWarningV?$filter=RegID%20eq%202472%20and%20LangKey%20eq%201&$format=json
                view_url_base = 'http://api.nve.no/hydrology/regobs/v0.9.8/Odata.svc/AvalancheWarningV?$filter=RegID%20eq%20{0}%20and%20LangKey%20eq%201&$format=json'

                if cause_name1 != "Ikke gitt":
                    prob = gp.AvalancheProblem(region_id, region_name, date, 0, cause_name1, source)
                    prob.set_regobs_view(view)
                    prob.set_regid(p['RegID'])
                    prob.set_url(view_url_base.format(p['RegID']))
                    problems.append(prob)

                if cause_name2 != "Ikke gitt":
                    prob = gp.AvalancheProblem(region_id, region_name, date, 1, cause_name2, source)
                    prob.set_regobs_view(view)
                    prob.set_regid(p['RegID'])
                    prob.set_url(view_url_base.format(p['RegID']))
                    problems.append(prob)

                if cause_name3 != "Ikke gitt":
                    prob = gp.AvalancheProblem(region_id, region_name, date, 2, cause_name3, source)
                    prob.set_regobs_view(view)
                    prob.set_regid(p['RegID'])
                    prob.set_url(view_url_base.format(p['RegID']))
                    problems.append(prob)

    return problems
Пример #10
0
def get_problems_from_AvalancheEvalProblem2V(region_id, start_date, end_date):
    '''Used from 2014-02-10 up to today

    http://api.nve.no/hydrology/regobs/v0.9.8/Odata.svc/AvalancheEvalProblem2V?$filter=DtObsTime%20gt%20datetime%272012-01-10%27%20and%20DtObsTime%20lt%20datetime%272015-01-15%27%20and%20ForecastRegionName%20eq%20%27Senja%27%20and%20LangKey%20eq%201&$format=json
    :param region_name:
    :param region_id:
    :param start_date:
    :param end_date:
    :return:

    Datasample:
    {
        __metadata: {
            id: "http://api.nve.no/hydrology/RegObs/v0.9.9/OData.svc/AvalancheEvalProblem2V(AvalancheEvalProblemID=0,AvalProbabilityName='Mulig%20',DtObsTime=datetime'2014-04-27T19%3A00%3A00',LangKey=1,NickName='MagnusH%40obskorps',ObsLocationID=11031,RegID=34540,UTMEast=639918,UTMNorth=7731868,UTMZone=33)",
            uri: "http://api.nve.no/hydrology/RegObs/v0.9.9/OData.svc/AvalancheEvalProblem2V(AvalancheEvalProblemID=0,AvalProbabilityName='Mulig%20',DtObsTime=datetime'2014-04-27T19%3A00%3A00',LangKey=1,NickName='MagnusH%40obskorps',ObsLocationID=11031,RegID=34540,UTMEast=639918,UTMNorth=7731868,UTMZone=33)",
            type: "RegObsModel.AvalancheEvalProblem2V"
        },
        RegID: 34540,
        AvalancheEvalProblemID: 0,
        DtObsTime: "/Date(1398625200000)/",
        DtRegTime: "/Date(1398633678170)/",
        ObsLocationID: 11031,
        LocationName: "Steinskardtind",
        UTMZone: 33,
        UTMEast: 639918,
        UTMNorth: 7731868,
        ForecastRegionName: "Tromsø",
        MunicipalName: "TROMSØ",
        NickName: "MagnusH@obskorps",
        CompetenceLevelName: "****",
        AvalancheExtTID: 0,
        AvalancheExtName: "Ikke gitt ",
        AvalCauseTID: 22,
        AvalCauseName: "Opphopning av vann over skarelag",
        AvalCauseDepthTID: 2,
        AvalCauseDepthName: "Innen en meter",
        AvalCauseAttributes: 4,
        AvalCauseAttributeName: "Det overliggende laget er mykt.  ",
        DestructiveSizeTID: 2,
        DestructiveSizeName: "2 - Små",
        AvalTriggerSimpleTID: 10,
        AvalTriggerSimpleName: "Stor tilleggsbelastning ",
        AvalProbabilityTID: 3,
        AvalProbabilityName: "Mulig ",
        ValidExposition: "00000000",
        ExposedHeight1: 600,
        ExposedHeight2: 300,
        Comment: "Gammelt skarelag i ferd med å gå i oppløsning. Laget over og under er omvandlet til fuktig/våt grovkornet snø. Skarelaget bærer ikke lenger og kan kollapse. Ser ut til å ha liten evne til propagering, men glir ut som lite flak.",
        LangKey: 1
}

    '''

    region_name = get_forecast_region_name(region_id)
    aval_cause_kdv = gkdv.get_kdv('AvalCauseKDV')
    view = "AvalancheEvalProblem2V"

    odata_query = "DtObsTime gt datetime'{1}' and " \
                 "DtObsTime lt datetime'{2}' and " \
                 "ForecastRegionName eq '{0}' and " \
                 "LangKey eq 1".format(region_name, start_date, end_date)
    odata_query = fe.add_norwegian_letters(odata_query)

    url = "http://api.nve.no/hydrology/regobs/{0}/Odata.svc/{1}?$filter={2}&$format=json".decode('utf8').format(
        api_version, view, odata_query)

    result = requests.get(url).json()
    result = result['d']['results']

    print 'getregobs.py -> get_problems_from_AvalancheEvalProblem2V: {0} observations for {1} in from {2} to {3}.'\
        .format(len(result), region_id, start_date, end_date)


    # if more than 1000 elements are requested, odata truncates data to 1000. We do more requests
    if len(result) == 1000:
        time_delta = end_date - start_date
        date_in_middle = start_date + time_delta/2
        problems = get_problems_from_AvalancheEvalProblem2V(region_id, start_date, date_in_middle) \
               + get_problems_from_AvalancheEvalProblem2V(region_id, date_in_middle, end_date)
    else:

        problems = []

        if len(result) != 0:
            for p in result:
                cause = int(p['AvalCauseTID'])
                if cause != 0:

                    date = unix_time_2_normal(int(p['DtObsTime'][6:-2])).date()
                    order = int(p["AvalancheEvalProblemID"])
                    cause_tid = p['AvalCauseTID']
                    cause_name = aval_cause_kdv[cause_tid].Name
                    source = "Observasjon"

                    prob = gp.AvalancheProblem(region_id, region_name, date, order, cause_name, source)

                    prob.set_cause_tid(cause_tid)
                    prob.set_municipal(p['MunicipalName'])
                    prob.set_regid(p['RegID'])
                    prob.set_url("{0}{1}".format(registration_basestring, prob.regid))
                    prob.set_aval_size(p["DestructiveSizeName"])
                    prob.set_problem_combined(p['AvalCauseName'])
                    prob.set_regobs_view(view)
                    prob.set_nick_name(p['NickName'])

                    problems.append(prob)

    return problems
Пример #11
0
def get_problems_from_AvalancheEvalProblemV(region_id, start_date, end_date):
    '''Used for observations from 2012-11-29 up until 2014-05-13
    (elrapp used the view but added only "Ikke gitt")

    Eg url:
    http://api.nve.no/hydrology/regobs/v0.9.8/Odata.svc/AvalancheEvalProblemV?$filter=DtObsTime%20gt%20datetime%272012-01-10%27%20and%20DtObsTime%20lt%20datetime%272013-01-15%27%20and%20ForecastRegionName%20eq%20%27Senja%27%20and%20LangKey%20eq%201&$format=json

    :param region_name:
    :param region_id:
    :param start_date:
    :param end_date:
    :return:
    '''

    region_name = get_forecast_region_name(region_id)
    view = "AvalancheEvalProblemV"
    odata_query = "DtObsTime gt datetime'{1}' and " \
                 "DtObsTime lt datetime'{2}' and " \
                 "ForecastRegionName eq '{0}' and " \
                 "LangKey eq 1".format(region_name, start_date, end_date)
    odata_query = fe.add_norwegian_letters(odata_query)

    url = "http://api.nve.no/hydrology/regobs/{0}/Odata.svc/{1}?$filter={2}&$format=json".decode('utf8').format(
        api_version, view, odata_query)

    result = requests.get(url).json()
    result = result['d']['results']

    print 'getregobs.py -> get_problems_from_AvalancheEvalProblemV: {0} observations for {1} in from {2} to {3}.'\
        .format(len(result), region_id, start_date, end_date)

    # if more than 1000 elements are requested, odata truncates data to 1000. We do more requests
    if len(result) == 1000:
        time_delta = end_date - start_date
        date_in_middle = start_date + time_delta/2
        problems = get_problems_from_AvalancheEvalProblemV(region_id, start_date, date_in_middle) \
               + get_problems_from_AvalancheEvalProblemV(region_id, date_in_middle, end_date)
    else:
        problems = []

        if len(result) != 0:
            for p in result:

                cause = int(p['AvalCauseTID'])
                cause_ext = int(p["AvalCauseExtTID"])

                if cause != 0 and cause_ext != 0:

                    date = unix_time_2_normal(int(p['DtObsTime'][6:-2])).date()
                    order = int(p["AvalancheEvalProblemID"])
                    cause_name = fe.remove_norwegian_letters(p['AvalCauseName'])
                    cause_ext_name = fe.remove_norwegian_letters(p['AvalCauseExtName'])
                    cause_name = "{0}, {1}".format(cause_name, cause_ext_name)
                    source = "Observasjon"

                    prob = gp.AvalancheProblem(region_id, region_name, date, order, cause_name, source)

                    prob.set_municipal(p['MunicipalName'])
                    prob.set_regid(p['RegID'])
                    prob.set_url("{0}{1}".format(registration_basestring, prob.regid))
                    prob.set_aval_size(p["DestructiveSizeExtName"])
                    prob.set_problem_combined(p['AvalancheProblemCombined'])
                    prob.set_regobs_view(view)
                    prob.set_nick_name(p['NickName'])

                    problems.append(prob)

    return problems
def _danger_level_colors(values, dl_colors):
    """Map danger-level values (possibly negated) to bar colours.

    abs(value) in 1..5 selects dl_colors[1..5]; anything else falls back to
    dl_colors[0] (the "not assessed" grey)."""
    return [dl_colors[abs(n)] if 1 <= abs(n) <= 5 else dl_colors[0] for n in values]


def plot_danger_levels(region_name, start_date, end_date, danger_levels, aval_indexes):
    """Plots the danger levels as bars and makes a small cake diagram with distribution.

    Four stacked subplots: avalanche index scatter, forecasted danger bars,
    observed danger bars, and how well the forecast matched observations.
    The figure is written to env.web_images_folder.

    :param region_name:     [String] Name of forecast region
    :param start_date:      [date] start of plotted period
    :param end_date:        [date] end of plotted period
    :param danger_levels:   [list] danger-level objects with .date, .source, .danger_level
    :param aval_indexes:    [list] avalanche-index objects with .date, .index

    :return:
    """

    filename = r"{0} faregrader {1}-{2}".format(region_name, start_date.strftime("%Y"), end_date.strftime("%y"))
    print ("Plotting {0}".format(filename))

    # Figure dimensions
    fsize = (16, 16)
    fig = plt.figure(figsize=fsize)
    plt.clf()

    ##########################################
    ###### First subplot with avalanche index
    ##########################################
    pplt.subplot2grid((6, 1), (0, 0), rowspan=1)

    index_dates = []
    data_indexes = []
    index_colors = []

    for i in aval_indexes:
        date = i.date
        index_dates.append(date)
        data_indexes.append(i.index)
        # color on the marker, graded by index severity
        if i.index == 0:
            index_colors.append("white")
        elif i.index == 1:
            index_colors.append("pink")
        elif i.index >= 2 and i.index <= 5:
            index_colors.append("green")
        elif i.index >= 6 and i.index <= 9:
            index_colors.append("yellow")
        elif i.index >= 10 and i.index <= 12:
            index_colors.append("orange")
        elif i.index >= 13:
            index_colors.append("red")
        else:
            index_colors.append("pink")

    index_values = np.asarray(data_indexes, int)

    plt.scatter(index_dates, index_values, s=50.0, c=index_colors, alpha=0.5)
    plt.yticks(
        [1, 4, 6, 11, 17, 22], ["Ingen - 1", "Ett str2 - 4", "Ett str3 - 6", "Noen str3 - 11", "Mange str3 - 17", ""]
    )
    plt.ylabel("Skredindex")
    plt.xlim(start_date, end_date)

    title = fe.add_norwegian_letters(
        "Faregrad og skredindeks for {0} ({1}-{2})".format(
            region_name, start_date.strftime("%Y"), end_date.strftime("%y")
        )
    )
    plt.title(title)

    ##########################################
    ## Second subplot with avalanche danger forecast
    ##########################################
    pplt.subplot2grid((6, 1), (1, 0), rowspan=2)

    # Making the main plot
    dl_labels = ["", "1 - Liten", "2 - Moderat", "3 - Betydelig", "4 - Stor", ""]
    dl_colors = ["0.5", "#ccff66", "#ffff00", "#ff9900", "#ff0000", "k"]

    # Forecasted levels plot upwards; everything else is zeroed out here.
    data_dates = []
    data_dangers = []

    for d in danger_levels:
        data_dates.append(d.date)
        if "Varsel" in d.source:
            data_dangers.append(d.danger_level)
        else:
            data_dangers.append(0.0 * d.danger_level)

    values = np.asarray(data_dangers, int)
    colors = _danger_level_colors(values, dl_colors)

    plt.bar(data_dates, values, color=colors)
    plt.yticks(range(0, len(dl_labels), 1), dl_labels)  # , size='small')
    plt.ylabel("Varslet faregrad")
    plt.xlim(start_date, end_date)

    ##########################################
    ######### Third subplot with avalanche danger observed
    ##########################################
    pplt.subplot2grid((6, 1), (3, 0), rowspan=2)

    dl_labels = ["", "1 - Liten", "2 - Moderat", "3 - Betydelig", "4 - Stor", ""]
    dl_colors = ["0.5", "#ccff66", "#ffff00", "#ff9900", "#ff0000", "k"]

    # Observed levels plot downwards (negated); everything else is zeroed out.
    data_dates = []
    data_dangers = []

    for d in danger_levels:
        data_dates.append(d.date)
        if not "Varsel" in d.source:
            data_dangers.append(-1.0 * d.danger_level)
        else:
            data_dangers.append(0.0 * d.danger_level)

    values = np.asarray(data_dangers, int)
    colors = _danger_level_colors(values, dl_colors)

    plt.bar(data_dates, values, color=colors)
    plt.yticks(range(0, -len(dl_labels), -1), dl_labels)
    plt.ylabel("Observert faregrad")
    plt.xticks([])
    plt.xlim(start_date, end_date)

    ##########################################
    ######### Forth subplot with how well the forecast is
    ##########################################
    pplt.subplot2grid((6, 1), (5, 0), rowspan=1)
    plt.xlim(start_date, end_date)

    forecast_correct_values = []
    forecast_correct_colours = []
    forecast_correct_dates = []
    for d in danger_levels:
        if "Observasjon" in d.source:
            forecast_correct = d.danger_object.forecast_correct
            if forecast_correct is not None and not "Ikke gitt" in forecast_correct:
                forecast_correct_dates.append(d.date)
                if "riktig" in forecast_correct:
                    forecast_correct_values.append(0)
                    forecast_correct_colours.append("green")
                elif "for lav" in forecast_correct:
                    forecast_correct_values.append(-1)
                    forecast_correct_colours.append("red")
                elif "for hoey" in forecast_correct:
                    forecast_correct_values.append(1)
                    forecast_correct_colours.append("red")
                else:
                    # unexpected text; plot neutral position in a warning colour
                    forecast_correct_values.append(0)
                    forecast_correct_colours.append("pink")

    forecast_correct_np_values = np.asarray(forecast_correct_values, int)
    plt.scatter(forecast_correct_dates, forecast_correct_np_values, s=50.0, c=forecast_correct_colours, alpha=0.5)
    plt.yticks(range(-1, 2, 1), ["For lav", "Riktig", "    For hoey"])
    plt.ylabel("Stemmer varslet faregrad?")

    # this is an inset pie of the distribution of dangerlevels OVER the main axes
    xfrac = 0.15
    yfrac = (float(fsize[0]) / float(fsize[1])) * xfrac
    xpos = 0.45 - xfrac
    ypos = 0.95 - yfrac
    a = plt.axes([0.8, 0.66, 0.10, 0.10])
    # a = plt.axes([xpos, ypos, xfrac, yfrac])
    wDistr = np.bincount([d.danger_level for d in danger_levels if "Varsel" in d.source])
    a.pie(wDistr, colors=dl_colors, autopct="%1.0f%%", shadow=False)
    plt.setp(a, xticks=[], yticks=[])

    # this is an inset pie of the distribution of dangerlevels UNDER the main axes
    xfrac = 0.15
    yfrac = (float(fsize[0]) / float(fsize[1])) * xfrac
    xpos = 0.95 - xfrac
    ypos = 0.29 - yfrac
    b = plt.axes([0.8, 0.24, 0.10, 0.10])
    # b = plt.axes([xpos, ypos, xfrac, yfrac])
    eDistr = np.bincount([d.danger_level for d in danger_levels if "Observasjon" in d.source])
    b.pie(eDistr, colors=dl_colors, autopct="%1.0f%%", shadow=False)
    plt.setp(b, xticks=[], yticks=[])

    # figuretext in observed dangerlevels subplot
    w_number, e_number, fract_same = compare_danger_levels(danger_levels)
    fig.text(
        0.15,
        0.25,
        " Totalt {0} varslet faregrader og {1} observerte faregrader \n og det er {2}% samsvar mellom det som er observert og varslet.".format(
            w_number, e_number, int(round(fract_same * 100, 0))
        ),
        fontsize=14,
    )

    # fractions to the right in the forecast correct subplot
    forecast_correct_distr = {}
    for f in forecast_correct_values:
        if f in forecast_correct_distr.keys():
            forecast_correct_distr[f] += 1
        else:
            forecast_correct_distr[f] = 1

    if 1 in forecast_correct_distr.keys():
        fig.text(
            0.91,
            0.19,
            "{0}%".format(int(round(forecast_correct_distr[1] / float(len(forecast_correct_values)) * 100, 0))),
            fontsize=14,
        )
    if 0 in forecast_correct_distr.keys():
        fig.text(
            0.91,
            0.15,
            "{0}%".format(int(round(forecast_correct_distr[0] / float(len(forecast_correct_values)) * 100, 0))),
            fontsize=14,
        )
    if -1 in forecast_correct_distr.keys():
        fig.text(
            0.91,
            0.11,
            "{0}%".format(int(round(forecast_correct_distr[-1] / float(len(forecast_correct_values)) * 100, 0))),
            fontsize=14,
        )

    # tight_layout must run BEFORE savefig to affect the written file
    # (it was previously applied after saving, which had no effect).
    fig.tight_layout()
    plt.savefig("{0}{1}".format(env.web_images_folder, filename))  # ,dpi=90)
    plt.close(fig)

    return
def plot_causes(region_name, from_date, to_date, causes):
    """Plots forecasted (red) vs observed (blue) avalanche problems/causes over time.

    One horizontal row per cause TID, with right-hand ticks summarising the
    number of forecasts (v), observations (o) and their correlation (s) per
    cause. The figure is written to env.web_images_folder.

    :param region_name: [String] Name of forecast region
    :param from_date:   [date] start of plotted period
    :param to_date:     [date] end of plotted period
    :param causes:      [list] cause objects with .date, .source, .cause_tid
    :return:
    """

    filename = r"{0} skredproblemer {1}-{2}".format(region_name, from_date.strftime("%Y"), to_date.strftime("%y"))
    print ("Plotting {0}".format(filename))

    AvalCauseKDV = gkdv.get_kdv("AvalCauseKDV")
    list_of_causes = [0, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25]
    # list_of_causes = set([c.cause_tid for c in causes])
    list_of_cause_names = [fe.add_norwegian_letters(AvalCauseKDV[tid].Name) for tid in list_of_causes]

    dict_of_causes = {}
    for c in list_of_causes:
        dict_of_causes[c] = []
    for c in causes:
        dict_of_causes[c.cause_tid].append(c)

    # Start plotting
    fsize = (16, 7)
    plt.figure(figsize=fsize)
    plt.clf()

    # plot lines and left and bottom ticks.
    # NOTE: iterate list_of_causes (not dict.iteritems()) so row order is
    # deterministic and guaranteed to match the y-axis labels below.
    y = 0
    for cause_tid in list_of_causes:
        for v in dict_of_causes[cause_tid]:
            x = (v.date - from_date).days
            if "Varsel" in v.source:
                plt.hlines(y - 0.1, x, x + 1, lw=4, color="red")  # ofset the line 0.1 up
            if "Observasjon" in v.source:
                plt.hlines(y + 0.1, x, x + 1, lw=4, color="blue")  # ofset the line 0.1 down
        y += 1

    # Left y-axis labels
    plt.ylim(len(list_of_causes) - 1, -1)  # 16 skredproblemer
    plt.yticks(range(len(list_of_causes) + 1), list_of_cause_names)

    # x-axis labels: tick on the first day of each month
    axis_dates = []
    axis_positions = []
    for i in range(0, (to_date - from_date).days, 1):
        date = from_date + dt.timedelta(days=i)
        if date.day == 1:
            axis_dates.append(date.strftime("%b %Y"))
            axis_positions.append(i)
    plt.xticks(axis_positions, axis_dates)

    # Right hand side y-axis: per-cause counts and correlation.
    # Same fixed iteration order as the rows plotted above.
    right_ticks = []
    correlation_sum = 0.0
    for cause_tid in list_of_causes:
        values = dict_of_causes[cause_tid]
        values_obs = [vo for vo in values if "Observasjon" in vo.source]
        values_fc = [vf for vf in values if "Varsel" in vf.source]
        # correlation: same cause forecast and observed on the same date
        correlation = 0.0
        for obs in values_obs:
            for fc in values_fc:
                if obs.date == fc.date and obs.cause_tid == fc.cause_tid:
                    correlation += 1
        if len(values_obs) == 0 and len(values_fc) == 0:
            right_ticks.append("")
        else:
            if len(values_obs) == 0:
                right_ticks.append("v{0} o{1} s{2}%".format(len(values_fc), len(values_obs), 0))
            else:
                right_ticks.append(
                    "v{0} o{1} s{2}%".format(len(values_fc), len(values_obs), int(correlation / len(values_obs) * 100))
                )
        correlation_sum += correlation
    right_ticks.reverse()
    plt.twinx()
    plt.ylim(-1, len(right_ticks) - 1)
    plt.yticks(range(len(right_ticks) + 1), right_ticks)

    # the title
    num_obs = len([c for c in causes if "Observasjon" in c.source])
    num_fc = len([c for c in causes if "Varsel" in c.source])
    if num_obs == 0:
        correlation_prct = 0
    else:
        correlation_prct = int(correlation_sum / num_obs * 100)

    title = "Skredproblemer for {0} ({1} - {2}) \n Totalt {3} varslede problemer (roed) og {4} observerte problemer (blaa) \n og det er {5}% samsvar mellom det som er observert og det som er varselt.".format(
        region_name, from_date.strftime("%Y%m%d"), to_date.strftime("%Y%m%d"), num_fc, num_obs, correlation_prct
    )
    title = fe.add_norwegian_letters(title)
    plt.title(title)

    fig = plt.gcf()
    fig.subplots_adjust(left=0.2)
    plt.savefig("{0}{1}".format(env.web_images_folder, filename))
    plt.close(fig)

    return
                myfile.write(',\n{{"source":{0},"target":{1},"value":{2}}}'.format(nv.node_id, nv.target_id, nv.value))
        myfile.write('\n]}')


    # Write to .csv
    with open(incident_ap_dl_csv, "w") as  myfile:
        myfile.write('Dato;Varslingsregion;URL;RegID;'#Observatoer;'
                     'Skadeomfang;Aktivitet;URL;Faregrad;'
                     'Skredproblem;Svakt lag;Skredproblem;Svakt lag;Skredproblem;Svakt lag\n')

        for i in incident_list:
            if i.forecast:
                skredproblemene = ''.join([';{0};{1}'.format(sp.main_cause, sp.cause_name) for sp in i.forecast.avalanche_problems])
                registration = 'http://www.regobs.no/Registration/{0}'.format(i.incident.RegID)
                varsom_forecast = 'http://www.varsom.no/Snoskred/{0}/?date={1}'.format(
                    fe.add_norwegian_letters(i.forecast.region_name).replace(u'å', 'a').replace(u'ø', 'o'),
                    (i.forecast.date).strftime('%d.%m.%Y'))

                table_row = '{0};{1};{2};{3};{4};{5};{6};{7};{8}{9}\n'.format(
                    i.forecast.date,
                    i.forecast.region_name,
                    registration,
                    i.incident.RegID,
                    #i.incident.NickName,
                    i.incident.DamageExtentName,
                    i.incident.ActivityInfluencedName,
                    varsom_forecast,
                    i.forecast.danger_level_name,
                    skredproblemene)

                myfile.write(table_row)
Пример #15
0
def plot_danger_levels(region_name, start_date, end_date, danger_levels):
    """Plot danger levels as mirrored bars with two inset pie charts of the distributions.

    Forecasted levels ('Varsel' in the source) are drawn as positive bars and
    observed levels ('Observasjon') as negative bars, so the two series mirror
    each other around the x-axis. Two inset pies show the level distribution of
    each series. The figure is saved to ``env.web_images_folder``.

    :param region_name:     [String] Name of the forecast region (used in title and filename)
    :param start_date:      [date] Start of the plotted period (year used in title/filename)
    :param end_date:        [date] End of the plotted period (year used in title/filename)
    :param danger_levels:   [list] Objects with ``date``, ``danger_level`` and ``source``
                            attributes (source contains 'Varsel' or 'Observasjon')

    :return:                None
    """

    filename = r"{0} faregrader {1}-{2}".format(region_name, start_date.strftime('%Y'), end_date.strftime('%y'))
    print("Plotting {0}".format(filename))

    # Figure dimensions
    fsize = (16, 10)
    fig = plt.figure(figsize=fsize)
    plt.clf()

    # Labels are mirrored around '0 - Ikke vurdert' because observed levels
    # are plotted as negative values below the x-axis.
    dl_labels = ['5 - Meget stor', '4 - Stor', '3 - Betydelig', '2 - Moderat', '1 - Liten', '0 - Ikke vurdert', '1 - Liten', '2 - Moderat', '3 - Betydelig', '4 - Stor', '5 - Meget stor']
    # Index 0 is grey for "not assessed"; indexes 1..5 map directly to danger levels 1..5.
    dl_colors = ['0.5', '#ccff66', '#ffff00', '#ff9900', '#ff0000', 'k']

    # Build one combined dataset: forecasted levels positive, observed levels negative.
    data_dates = []
    data_dangers = []

    for d in danger_levels:
        data_dates.append(d.date)
        if 'Varsel' in d.source:
            data_dangers.append(d.danger_level)
        else:
            data_dangers.append(-1*d.danger_level)

    values = np.asarray(data_dangers, int)

    # Color each bar by the magnitude of its danger level; anything outside 1..5 is grey.
    colors = [dl_colors[abs(n)] if 1 <= abs(n) <= 5 else dl_colors[0] for n in values]

    ax = plt.axes([.15, .05, .8, .9])
    ax.bar(data_dates, values, color=colors)

    # BUG FIX: use floor division. In Python 3 `len(...)/2` is a float and
    # range() raises TypeError on float arguments. `//` reproduces the original
    # Python 2 tick positions exactly (-5 .. 5 for the 11 labels).
    tick_positions = range(-len(dl_labels)//2 + 1, len(dl_labels)//2 + 1)

    plt.yticks(tick_positions, dl_labels)
    plt.ylabel('Faregrad')

    # Twin axis with star markers so isolated single-day values remain visible.
    ax2 = ax.twinx()
    ax2.plot(data_dates, values, marker='*', linestyle='')

    plt.yticks(tick_positions, dl_labels)
    plt.ylabel('Faregrad')

    title = fe.add_norwegian_letters("Snoeskredfaregrad for {0} ({1}-{2})".format(region_name, start_date.strftime('%Y'), end_date.strftime('%y')))
    plt.title(title)

    # Summary line: counts of forecasted/observed levels and their agreement.
    w_number, e_number, fract_same = compare_danger_levels(danger_levels)
    fig.text(0.18, 0.13, " Totalt {0} varslet faregrader og {1} observerte faregrader \n og det er {2}% samsvar mellom det som er observert og varslet."
             .format(w_number, e_number, int(round(fract_same*100, 0))), fontsize=14)

    # Inset pie of the distribution of FORECASTED danger levels, placed OVER the main axes.
    xfrac = 0.15
    yfrac = (float(fsize[0])/float(fsize[1])) * xfrac  # keep the pie circular on a non-square figure
    a = plt.axes([0.95 - xfrac, 0.95 - yfrac, xfrac, yfrac])
    wDistr = np.bincount([d.danger_level for d in danger_levels if 'Varsel' in d.source])
    a.pie(wDistr, colors=dl_colors, autopct='%1.0f%%', shadow=False)
    plt.setp(a, xticks=[], yticks=[])

    # Inset pie of the distribution of OBSERVED danger levels, placed UNDER the main axes.
    b = plt.axes([0.95 - xfrac, 0.29 - yfrac, xfrac, yfrac])
    eDistr = np.bincount([d.danger_level for d in danger_levels if 'Observasjon' in d.source])
    b.pie(eDistr, colors=dl_colors, autopct='%1.0f%%', shadow=False)
    plt.setp(b, xticks=[], yticks=[])

    # Save the figure to file and release the figure resources.
    plt.savefig("{0}{1}".format(env.web_images_folder, filename))#,dpi=90)
    plt.close(fig)

    return